From c02f0a5c400b0d55a3709ab2db60e4a336073e2d Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 26 Oct 2023 16:21:42 +0200 Subject: [PATCH 001/111] Run builds and tests against Altinity's CI/CD infrastructure --- .github/retry.sh | 22 + .github/workflows/backport_branches.yml | 6 +- .github/workflows/cherry_pick.yml | 45 - .github/workflows/docs_check.yml | 176 - .github/workflows/jepsen.yml | 68 - .github/workflows/master.yml | 6 +- .github/workflows/nightly.yml | 136 - .github/workflows/pull_request.yml | 5278 ----------------- .github/workflows/release.yml | 65 - .github/workflows/release_branches.yml | 2759 ++++----- .github/workflows/tags_stable.yml | 72 - .github/workflows/woboq.yml | 44 - cmake/autogenerated_versions.txt | 8 +- cmake/version.cmake | 2 +- docker/images.json | 99 +- docker/packager/binary/Dockerfile | 9 +- docker/packager/binary/build.sh | 4 + docker/packager/packager | 6 +- docker/test/base/Dockerfile | 4 +- docker/test/codebrowser/Dockerfile | 2 +- docker/test/fasttest/Dockerfile | 2 +- docker/test/fuzzer/Dockerfile | 2 +- docker/test/integration/base/Dockerfile | 2 +- .../compose/docker_compose_clickhouse.yml | 2 +- .../compose/docker_compose_dotnet_client.yml | 2 +- .../compose/docker_compose_jdbc_bridge.yml | 3 +- .../runner/compose/docker_compose_keeper.yml | 6 +- .../docker_compose_kerberized_hdfs.yml | 4 +- .../docker_compose_kerberized_kafka.yml | 2 +- .../compose/docker_compose_kerberos_kdc.yml | 2 +- .../runner/compose/docker_compose_minio.yml | 6 +- .../docker_compose_mysql_golang_client.yml | 2 +- .../docker_compose_mysql_java_client.yml | 2 +- .../docker_compose_mysql_js_client.yml | 2 +- .../docker_compose_mysql_php_client.yml | 2 +- .../runner/compose/docker_compose_nginx.yml | 2 +- .../docker_compose_postgresql_java_client.yml | 2 +- .../integration/runner/dockerd-entrypoint.sh | 4 +- docker/test/keeper-jepsen/Dockerfile | 2 +- docker/test/performance-comparison/Dockerfile | 2 +- docker/test/server-jepsen/Dockerfile | 2 
+- docker/test/stateful/Dockerfile | 5 +- docker/test/stateful/setup_minio.sh | 92 +- ...sts possible in Altinity's infrastructure) | 77 + docker/test/stateless/Dockerfile | 4 +- docker/test/stateless/setup_minio.sh | 1 + docker/test/stateless_pytest/Dockerfile | 33 + docker/test/stress/Dockerfile | 2 +- docker/test/unit/Dockerfile | 2 +- docker/test/upgrade/Dockerfile | 2 +- packages/clickhouse-client.yaml | 4 +- packages/clickhouse-common-static-dbg.yaml | 4 +- packages/clickhouse-common-static.yaml | 9 +- packages/clickhouse-keeper-dbg.yaml | 4 +- packages/clickhouse-keeper.yaml | 4 +- packages/clickhouse-server.yaml | 4 +- .../internal/collectors/system/system_test.go | 30 +- .../internal/platform/database/native_test.go | 24 +- .../internal/platform/manager_test.go | 2 +- .../internal/platform/utils/process_test.go | 2 +- programs/diagnostics/internal/runner_test.go | 2 +- tests/ci/ast_fuzzer_check.py | 4 +- tests/ci/build_check.py | 69 +- tests/ci/ccache_utils.py | 2 +- tests/ci/ci_config.py | 3 +- tests/ci/clickhouse_helper.py | 2 +- tests/ci/codebrowser_check.py | 2 +- tests/ci/compatibility_check.py | 6 +- tests/ci/docker_images_check.py | 53 +- tests/ci/docker_manifests_merge.py | 6 +- tests/ci/docker_pull_helper.py | 23 +- tests/ci/docker_server.py | 16 +- tests/ci/docker_test.py | 76 +- tests/ci/docs_check.py | 4 +- tests/ci/env_helper.py | 8 +- tests/ci/fast_test_check.py | 4 +- tests/ci/functional_test_check.py | 19 +- tests/ci/get_robot_token.py | 14 +- tests/ci/git_helper.py | 4 +- tests/ci/git_test.py | 6 + tests/ci/install_check.py | 4 +- tests/ci/integration_test_check.py | 36 +- tests/ci/jepsen_check.py | 6 +- tests/ci/performance_comparison_check.py | 2 +- tests/ci/sign_release.py | 94 + tests/ci/sqlancer_check.py | 2 +- tests/ci/sqllogic_test.py | 2 +- tests/ci/sqltest.py | 2 +- tests/ci/stress_check.py | 4 +- tests/ci/style_check.py | 4 +- tests/ci/tests/docker_images_for_tests.json | 98 +- tests/ci/unit_tests_check.py | 4 +- 
tests/ci/upgrade_check.py | 2 +- tests/ci/version_helper.py | 37 +- tests/integration/ci-runner.py | 28 +- tests/integration/helpers/cluster.py | 7 +- tests/integration/helpers/network.py | 2 +- tests/integration/runner | 22 +- .../test_functions.py | 1 + .../test_insert_profile_events.py | 1 + .../test_ip_types_binary_compatibility.py | 1 + ...test_vertical_merges_from_compact_parts.py | 1 + .../test.py | 1 + tests/integration/test_storage_kafka/test.py | 12 + utils/clickhouse-docker | 4 +- utils/zero_copy/zero_copy_schema_converter.py | 2 +- 106 files changed, 2041 insertions(+), 7832 deletions(-) create mode 100755 .github/retry.sh delete mode 100644 .github/workflows/cherry_pick.yml delete mode 100644 .github/workflows/docs_check.yml delete mode 100644 .github/workflows/jepsen.yml delete mode 100644 .github/workflows/nightly.yml delete mode 100644 .github/workflows/pull_request.yml delete mode 100644 .github/workflows/release.yml delete mode 100644 .github/workflows/tags_stable.yml delete mode 100644 .github/workflows/woboq.yml mode change 120000 => 100755 docker/test/stateful/setup_minio.sh create mode 100755 docker/test/stateful/setup_minio.sh~c3e81877ca (Make builds and tests possible in Altinity's infrastructure) create mode 100644 docker/test/stateless_pytest/Dockerfile create mode 100644 tests/ci/sign_release.py diff --git a/.github/retry.sh b/.github/retry.sh new file mode 100755 index 000000000000..566c2cf11315 --- /dev/null +++ b/.github/retry.sh @@ -0,0 +1,22 @@ +#!/bin/bash +# Execute command until exitcode is 0 or +# maximum number of retries is reached +# Example: +# ./retry +retries=$1 +delay=$2 +command="${@:3}" +exitcode=0 +try=0 +until [ "$try" -ge $retries ] +do + echo "$command" + eval "$command" + exitcode=$? 
+ if [ $exitcode -eq 0 ]; then + break + fi + try=$((try+1)) + sleep $2 +done +exit $exitcode diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml index c20255579e2d..4a5703450bdd 100644 --- a/.github/workflows/backport_branches.yml +++ b/.github/workflows/backport_branches.yml @@ -487,13 +487,13 @@ jobs: with: clear-repository: true fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself - - name: Check docker clickhouse/clickhouse-server building + - name: Check docker altinityinfra/clickhouse-server building run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 docker_server.py --release-type head --no-push \ - --image-repo clickhouse/clickhouse-server --image-path docker/server + --image-repo altinityinfra/clickhouse-server --image-path docker/server python3 docker_server.py --release-type head --no-push \ - --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper + --image-repo altinityinfra/clickhouse-keeper --image-path docker/keeper - name: Cleanup if: always() run: | diff --git a/.github/workflows/cherry_pick.yml b/.github/workflows/cherry_pick.yml deleted file mode 100644 index 8d1e20559780..000000000000 --- a/.github/workflows/cherry_pick.yml +++ /dev/null @@ -1,45 +0,0 @@ -name: CherryPick - -env: - # Force the stdout and stderr streams to be unbuffered - PYTHONUNBUFFERED: 1 - -concurrency: - group: cherry-pick -on: # yamllint disable-line rule:truthy - schedule: - - cron: '0 * * * *' - workflow_dispatch: - -jobs: - CherryPick: - runs-on: [self-hosted, style-checker-aarch64] - steps: - - name: Set envs - # https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#multiline-strings - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/cherry_pick - ROBOT_CLICKHOUSE_SSH_KEY<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{ runner.temp }}/style_check - ROBOT_CLICKHOUSE_SSH_KEY<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/docs_check - 
REPO_COPY=${{runner.temp}}/docs_check/ClickHouse - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.TEMP_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Docs Check - run: | - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 docs_check.py - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FinishCheck: - needs: - - StyleCheck - - DockerHubPush - - DocsCheck - runs-on: [self-hosted, style-checker] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Finish label - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 finish_check.py - python3 merge_pr.py --check-approved diff --git a/.github/workflows/jepsen.yml b/.github/workflows/jepsen.yml deleted file mode 100644 index e67df15c4d36..000000000000 --- a/.github/workflows/jepsen.yml +++ /dev/null @@ -1,68 +0,0 @@ -name: JepsenWorkflow -env: - # Force the stdout and stderr streams to be unbuffered - PYTHONUNBUFFERED: 1 -concurrency: - group: jepsen -on: # yamllint disable-line rule:truthy - schedule: - - cron: '0 */6 * * *' - workflow_dispatch: - workflow_call: -jobs: - KeeperJepsenRelease: - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/keeper_jepsen - REPO_COPY=${{runner.temp}}/keeper_jepsen/ClickHouse - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - fetch-depth: 0 - - name: Jepsen Test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 jepsen_check.py keeper - - name: Cleanup - if: always() - run: | - 
docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - # ServerJepsenRelease: - # runs-on: [self-hosted, style-checker] - # if: ${{ always() }} - # needs: [KeeperJepsenRelease] - # steps: - # - name: Set envs - # run: | - # cat >> "$GITHUB_ENV" << 'EOF' - # TEMP_PATH=${{runner.temp}}/server_jepsen - # REPO_COPY=${{runner.temp}}/server_jepsen/ClickHouse - # EOF - # - name: Check out repository code - # uses: ClickHouse/checkout@v1 - # with: - # clear-repository: true - # fetch-depth: 0 - # - name: Jepsen Test - # run: | - # sudo rm -fr "$TEMP_PATH" - # mkdir -p "$TEMP_PATH" - # cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - # cd "$REPO_COPY/tests/ci" - # python3 jepsen_check.py server - # - name: Cleanup - # if: always() - # run: | - # docker ps --quiet | xargs --no-run-if-empty docker kill ||: - # docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - # sudo rm -fr "$TEMP_PATH" diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 4771e5842666..eda455b3c3a6 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -963,13 +963,13 @@ jobs: with: clear-repository: true fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself - - name: Check docker clickhouse/clickhouse-server building + - name: Check docker altinityinfra/clickhouse-server building run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 docker_server.py --release-type head \ - --image-repo clickhouse/clickhouse-server --image-path docker/server + --image-repo altinityinfra/clickhouse-server --image-path docker/server python3 docker_server.py --release-type head \ - --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper + --image-repo altinityinfra/clickhouse-keeper --image-path docker/keeper - name: Cleanup if: always() run: | diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml deleted file mode 
100644 index 8162dc37223e..000000000000 --- a/.github/workflows/nightly.yml +++ /dev/null @@ -1,136 +0,0 @@ -name: NightlyBuilds - -env: - # Force the stdout and stderr streams to be unbuffered - PYTHONUNBUFFERED: 1 - -"on": - schedule: - - cron: '13 3 * * *' - workflow_dispatch: - -jobs: - Debug: - # The task for having a preserved ENV and event.json for later investigation - uses: ./.github/workflows/debug.yml - DockerHubPushAarch64: - runs-on: [self-hosted, style-checker-aarch64] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Images check - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_images_check.py --suffix aarch64 --all - - name: Upload images files to artifacts - uses: actions/upload-artifact@v3 - with: - name: changed_images_aarch64 - path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json - DockerHubPushAmd64: - runs-on: [self-hosted, style-checker] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Images check - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_images_check.py --suffix amd64 --all - - name: Upload images files to artifacts - uses: actions/upload-artifact@v3 - with: - name: changed_images_amd64 - path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json - DockerHubPush: - needs: [DockerHubPushAmd64, DockerHubPushAarch64] - runs-on: [self-hosted, style-checker] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - fetch-depth: 0 # to find ancestor merge commits necessary for finding proper docker tags - - name: Download changed aarch64 images - uses: actions/download-artifact@v3 - with: - name: changed_images_aarch64 - path: ${{ runner.temp }} - - name: Download changed amd64 images - uses: actions/download-artifact@v3 - with: - name: changed_images_amd64 - path: ${{ runner.temp }} - - name: 
Images check - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64 - - name: Upload images files to artifacts - uses: actions/upload-artifact@v3 - with: - name: changed_images - path: ${{ runner.temp }}/changed_images.json - Codebrowser: - needs: [DockerHubPush] - uses: ./.github/workflows/woboq.yml - SonarCloud: - runs-on: [self-hosted, builder] - env: - SONAR_SCANNER_VERSION: 4.8.0.2856 - SONAR_SERVER_URL: "https://sonarcloud.io" - BUILD_WRAPPER_OUT_DIR: build_wrapper_output_directory # Directory where build-wrapper output will be placed - CC: clang-16 - CXX: clang++-16 - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis - submodules: true - - name: Set up JDK 11 - uses: actions/setup-java@v1 - with: - java-version: 11 - - name: Download and set up sonar-scanner - env: - SONAR_SCANNER_DOWNLOAD_URL: https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-${{ env.SONAR_SCANNER_VERSION }}-linux.zip - run: | - mkdir -p "$HOME/.sonar" - curl -sSLo "$HOME/.sonar/sonar-scanner.zip" "${{ env.SONAR_SCANNER_DOWNLOAD_URL }}" - unzip -o "$HOME/.sonar/sonar-scanner.zip" -d "$HOME/.sonar/" - echo "$HOME/.sonar/sonar-scanner-${{ env.SONAR_SCANNER_VERSION }}-linux/bin" >> "$GITHUB_PATH" - - name: Download and set up build-wrapper - env: - BUILD_WRAPPER_DOWNLOAD_URL: ${{ env.SONAR_SERVER_URL }}/static/cpp/build-wrapper-linux-x86.zip - run: | - curl -sSLo "$HOME/.sonar/build-wrapper-linux-x86.zip" "${{ env.BUILD_WRAPPER_DOWNLOAD_URL }}" - unzip -o "$HOME/.sonar/build-wrapper-linux-x86.zip" -d "$HOME/.sonar/" - echo "$HOME/.sonar/build-wrapper-linux-x86" >> "$GITHUB_PATH" - - name: Set Up Build Tools - run: | - sudo apt-get update - sudo apt-get install -yq git cmake ccache ninja-build python3 yasm nasm - sudo bash -c "$(wget -O - 
https://apt.llvm.org/llvm.sh)" - - name: Run build-wrapper - run: | - mkdir build - cd build - cmake .. - cd .. - build-wrapper-linux-x86-64 --out-dir ${{ env.BUILD_WRAPPER_OUT_DIR }} cmake --build build/ - - name: Run sonar-scanner - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - run: | - sonar-scanner \ - --define sonar.host.url="${{ env.SONAR_SERVER_URL }}" \ - --define sonar.cfamily.build-wrapper-output="${{ env.BUILD_WRAPPER_OUT_DIR }}" \ - --define sonar.projectKey="ClickHouse_ClickHouse" \ - --define sonar.organization="clickhouse-java" \ - --define sonar.cfamily.cpp23.enabled=true \ - --define sonar.exclusions="**/*.java,**/*.ts,**/*.js,**/*.css,**/*.sql" diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml deleted file mode 100644 index f72c3d069f16..000000000000 --- a/.github/workflows/pull_request.yml +++ /dev/null @@ -1,5278 +0,0 @@ -name: PullRequestCI - -env: - # Force the stdout and stderr streams to be unbuffered - PYTHONUNBUFFERED: 1 - -on: # yamllint disable-line rule:truthy - pull_request: - types: - - synchronize - - reopened - - opened - branches: - - master - paths-ignore: - - 'CHANGELOG.md' - - 'README.md' - - 'SECURITY.md' - - 'docker/docs/**' - - 'docs/**' - - 'utils/check-style/aspell-ignore/**' - - 'tests/ci/docs_check.py' -########################################################################################## -##################################### SMALL CHECKS ####################################### -########################################################################################## -jobs: - CheckLabels: - runs-on: [self-hosted, style-checker] - # Run the first check always, even if the CI is cancelled - if: ${{ always() }} - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Labels check - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 run_check.py - PythonUnitTests: - runs-on: 
[self-hosted, style-checker] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Python unit tests - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - echo "Testing the main ci directory" - python3 -m unittest discover -s . -p '*_test.py' - for dir in *_lambda/; do - echo "Testing $dir" - python3 -m unittest discover -s "$dir" -p '*_test.py' - done - DockerHubPushAarch64: - needs: CheckLabels - runs-on: [self-hosted, style-checker-aarch64] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Images check - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_images_check.py --suffix aarch64 - - name: Upload images files to artifacts - uses: actions/upload-artifact@v3 - with: - name: changed_images_aarch64 - path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json - DockerHubPushAmd64: - needs: CheckLabels - runs-on: [self-hosted, style-checker] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Images check - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_images_check.py --suffix amd64 - - name: Upload images files to artifacts - uses: actions/upload-artifact@v3 - with: - name: changed_images_amd64 - path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json - DockerHubPush: - needs: [DockerHubPushAmd64, DockerHubPushAarch64, PythonUnitTests] - runs-on: [self-hosted, style-checker] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - fetch-depth: 0 # to find ancestor merge commits necessary for finding proper docker tags - - name: Download changed aarch64 images - uses: actions/download-artifact@v3 - with: - name: changed_images_aarch64 - path: ${{ runner.temp }} - - name: Download changed amd64 images - uses: actions/download-artifact@v3 - with: - name: changed_images_amd64 
- path: ${{ runner.temp }} - - name: Images check - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64 - - name: Upload images files to artifacts - uses: actions/upload-artifact@v3 - with: - name: changed_images - path: ${{ runner.temp }}/changed_images.json - StyleCheck: - needs: DockerHubPush - runs-on: [self-hosted, style-checker] - # We need additional `&& ! cancelled()` to have the job being able to cancel - if: ${{ success() || failure() || ( always() && ! cancelled() ) }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{ runner.temp }}/style_check - ROBOT_CLICKHOUSE_SSH_KEY<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/fasttest - REPO_COPY=${{runner.temp}}/fasttest/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.TEMP_PATH }} - - name: Fast Test - run: | - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 fast_test_check.py - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - CompatibilityCheckX86: - needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckX86 - run: | - sudo rm -fr 
"$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - CompatibilityCheckAarch64: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckAarch64 - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" -######################################################################################### -#################################### ORDINARY BUILDS #################################### -######################################################################################### - BuilderDebRelease: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - 
CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_release - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - fetch-depth: 0 # for performance artifact - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - BuilderBinRelease: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_release - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: 
Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebAarch64: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_aarch64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ runner.temp }}/images_path - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # for performance artifact - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebAsan: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_asan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ 
env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebUBsan: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_ubsan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebTsan: - 
needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_tsan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebMsan: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_msan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 
build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebDebug: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_debug - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . 
&& echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" -########################################################################################## -##################################### SPECIAL BUILDS ##################################### -########################################################################################## - BuilderBinClangTidy: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_tidy - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - 
name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderBinDarwin: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_darwin - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . 
&& echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderBinAarch64: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_aarch64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr 
"$TEMP_PATH" "$CACHES_PATH" - BuilderBinFreeBSD: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_freebsd - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderBinDarwinAarch64: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_darwin_aarch64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Apply sparse checkout for contrib # in order to check that it doesn't 
break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderBinPPC64: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_ppc64le - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs 
--no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderBinAmd64Compat: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_amd64_compat - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderBinAarch64V80Compat: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_aarch64_v80compat - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - 
with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderBinRISCV64: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_riscv64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderBinS390X: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - 
steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_s390x - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" -############################################################################################ -##################################### Docker images ####################################### -############################################################################################ - DockerServerImages: - needs: - - BuilderDebRelease - - BuilderDebAarch64 - runs-on: [self-hosted, style-checker] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself - - name: Check docker clickhouse/clickhouse-server building - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_server.py --release-type head --no-push \ - --image-repo clickhouse/clickhouse-server --image-path docker/server - python3 docker_server.py --release-type head 
--no-push \ - --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################ -##################################### BUILD REPORTER ####################################### -############################################################################################ - BuilderReport: - needs: - - BuilderBinRelease - - BuilderDebAarch64 - - BuilderDebAsan - - BuilderDebDebug - - BuilderDebMsan - - BuilderDebRelease - - BuilderDebTsan - - BuilderDebUBsan - runs-on: [self-hosted, style-checker] - if: ${{ success() || failure() }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - CHECK_NAME=ClickHouse build check - REPORTS_PATH=${{runner.temp}}/reports_dir - TEMP_PATH=${{runner.temp}}/report_check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - BuilderSpecialReport: - needs: - - BuilderBinAarch64 - - BuilderBinDarwin - - BuilderBinDarwinAarch64 - - BuilderBinFreeBSD - - BuilderBinPPC64 - - BuilderBinRISCV64 - - BuilderBinS390X - - BuilderBinAmd64Compat - - BuilderBinAarch64V80Compat - - BuilderBinClangTidy - runs-on: 
[self-hosted, style-checker] - if: ${{ success() || failure() }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/report_check - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=ClickHouse special build check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################ -#################################### INSTALL PACKAGES ###################################### -############################################################################################ - InstallPackagesTestRelease: - needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (amd64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py 
"$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - InstallPackagesTestAarch64: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (arm64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################## -########################### FUNCTIONAl STATELESS TESTS ####################################### -############################################################################################## - FunctionalStatelessTestRelease: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release) - REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out 
repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated0: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - 
REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated2: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated3: - needs: [BuilderDebRelease] - runs-on: [self-hosted, 
func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseWideParts: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_wide_parts - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, wide parts enabled) - REPO_COPY=${{runner.temp}}/stateless_wide_parts/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs 
--no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseAnalyzer: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_analyzer - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, analyzer) - REPO_COPY=${{runner.temp}}/stateless_analyzer/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseS3_0: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: 
always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseS3_1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Debug0: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p 
"$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Debug1: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Debug2: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - 
name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Debug3: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Debug4: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_debug/ClickHouse - 
KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Debug5: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Tsan0: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_tsan 
- REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Tsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Tsan2: - needs: [BuilderDebTsan] - 
runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Tsan3: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - 
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Tsan4: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAarch64: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (aarch64) - REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - 
name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan0: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan1: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd 
"$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan3: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm 
-fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan0: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: 
ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan3: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - 
with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan4: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestUBsan0: - needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (ubsan) - REPO_COPY=${{runner.temp}}/stateless_ubsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - 
RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestUBsan1: - needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (ubsan) - REPO_COPY=${{runner.temp}}/stateless_ubsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan0: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - 
REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan1: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan2: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - 
TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan3: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan4: - needs: [BuilderDebMsan] - runs-on: 
[self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan5: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - 
sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug0: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug1: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs 
--no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug2: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug3: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py 
"$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug4: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestFlakyCheck: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_flaky_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests flaky check (asan) - REPO_COPY=${{runner.temp}}/stateless_flaky_asan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" 
"$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - TestsBugfixCheck: - needs: [CheckLabels, StyleCheck] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/tests_bugfix_check - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=tests bugfix validate check - KILL_TIMEOUT=3600 - REPO_COPY=${{runner.temp}}/tests_bugfix_check/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Bugfix test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - - TEMP_PATH="${TEMP_PATH}/integration" \ - REPORTS_PATH="${REPORTS_PATH}/integration" \ - python3 integration_test_check.py "Integration $CHECK_NAME" \ - --validate-bugfix --post-commit-status=file || echo 'ignore exit code' - - TEMP_PATH="${TEMP_PATH}/stateless" \ - REPORTS_PATH="${REPORTS_PATH}/stateless" \ - python3 functional_test_check.py "Stateless $CHECK_NAME" "$KILL_TIMEOUT" \ - --validate-bugfix --post-commit-status=file || echo 'ignore exit code' - - python3 bugfix_validate_check.py "${TEMP_PATH}/stateless/functional_commit_status.tsv" "${TEMP_PATH}/integration/integration_commit_status.tsv" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################## -############################ FUNCTIONAl 
STATEFUL TESTS ####################################### -############################################################################################## - FunctionalStatefulTestRelease: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (release) - REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestAarch64: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (aarch64) - REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: 
always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestAsan: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (asan) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestTsan: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (tsan) - REPO_COPY=${{runner.temp}}/stateful_tsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - 
run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestMsan: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (msan) - REPO_COPY=${{runner.temp}}/stateful_msan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestUBsan: - needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (ubsan) - REPO_COPY=${{runner.temp}}/stateful_ubsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - 
docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestDebug: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (debug) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - # Parallel replicas - FunctionalStatefulTestDebugParallelReplicas: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (debug, ParallelReplicas) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd 
"$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestUBsanParallelReplicas: - needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (ubsan, ParallelReplicas) - REPO_COPY=${{runner.temp}}/stateful_ubsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestMsanParallelReplicas: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (msan, ParallelReplicas) - REPO_COPY=${{runner.temp}}/stateful_msan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: 
Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestTsanParallelReplicas: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (tsan, ParallelReplicas) - REPO_COPY=${{runner.temp}}/stateful_tsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestAsanParallelReplicas: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (asan, ParallelReplicas) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - 
uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestReleaseParallelReplicas: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (release, ParallelReplicas) - REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################## -######################################### STRESS TESTS ####################################### 
-############################################################################################## - StressTestAsan: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (asan) - REPO_COPY=${{runner.temp}}/stress_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - StressTestTsan: - needs: [BuilderDebTsan] - # func testers have 16 cores + 128 GB memory - # while stress testers have 36 cores + 72 memory - # It would be better to have something like 32 + 128, - # but such servers almost unavailable as spot instances. 
- runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (tsan) - REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - StressTestMsan: - needs: [BuilderDebMsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (msan) - REPO_COPY=${{runner.temp}}/stress_memory/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - StressTestUBsan: - needs: [BuilderDebUBsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - 
TEMP_PATH=${{runner.temp}}/stress_undefined - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (ubsan) - REPO_COPY=${{runner.temp}}/stress_undefined/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - StressTestDebug: - needs: [BuilderDebDebug] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (debug) - REPO_COPY=${{runner.temp}}/stress_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - ############################################################################################## - ######################################### UPGRADE CHECK ###################################### - 
############################################################################################## - UpgradeCheckAsan: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/upgrade_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Upgrade check (asan) - REPO_COPY=${{runner.temp}}/upgrade_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Upgrade check - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 upgrade_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - UpgradeCheckTsan: - needs: [BuilderDebTsan] - # same as for stress test with tsan - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/upgrade_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Upgrade check (tsan) - REPO_COPY=${{runner.temp}}/upgrade_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Upgrade check - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 upgrade_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr 
"$TEMP_PATH" - UpgradeCheckMsan: - needs: [BuilderDebMsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/upgrade_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Upgrade check (msan) - REPO_COPY=${{runner.temp}}/upgrade_memory/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Upgrade check - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 upgrade_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - UpgradeCheckDebug: - needs: [BuilderDebDebug] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/upgrade_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Upgrade check (debug) - REPO_COPY=${{runner.temp}}/upgrade_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Upgrade check - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 upgrade_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################## 
-##################################### AST FUZZERS ############################################ -############################################################################################## - ASTFuzzerTestAsan: - needs: [BuilderDebAsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (asan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - ASTFuzzerTestTsan: - needs: [BuilderDebTsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (tsan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_tsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all 
--quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - ASTFuzzerTestUBSan: - needs: [BuilderDebUBsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (ubsan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_ubsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - ASTFuzzerTestMSan: - needs: [BuilderDebMsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (msan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_msan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - ASTFuzzerTestDebug: - needs: 
[BuilderDebDebug] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (debug) - REPO_COPY=${{runner.temp}}/ast_fuzzer_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################# -############################# INTEGRATION TESTS ############################################# -############################################################################################# - IntegrationTestsAsan0: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 
integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan1: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" 
"$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan3: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan4: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p 
"$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan5: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan0: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: 
Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan1: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code 
- uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan3: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan4: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - 
with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan5: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan0: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: 
Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=2 
- RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan3: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan4: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - 
REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan5: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease0: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - 
REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease2: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> 
"$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease3: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsFlakyCheck: - needs: [BuilderDebAsan] - runs-on: 
[self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan_flaky_check - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests flaky check (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan_flaky_check/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################# -#################################### UNIT TESTS ############################################# -############################################################################################# - UnitTestsAsan: - needs: [BuilderDebAsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (asan) - REPO_COPY=${{runner.temp}}/unit_tests_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: 
always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - UnitTestsReleaseClang: - needs: [BuilderBinRelease] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (release) - REPO_COPY=${{runner.temp}}/unit_tests_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - UnitTestsTsan: - needs: [BuilderDebTsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (tsan) - REPO_COPY=${{runner.temp}}/unit_tests_tsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet 
| xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - UnitTestsMsan: - needs: [BuilderDebMsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (msan) - REPO_COPY=${{runner.temp}}/unit_tests_msan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - UnitTestsUBsan: - needs: [BuilderDebUBsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (ubsan) - REPO_COPY=${{runner.temp}}/unit_tests_ubsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" 
-############################################################################################# -#################################### PERFORMANCE TESTS ###################################### -############################################################################################# - PerformanceComparisonX86-0: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonX86-1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr 
"$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonX86-2: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonX86-3: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - 
clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-0: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-1: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - 
path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-2: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-3: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=3 
- RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################## -###################################### SQLANCER FUZZERS ###################################### -############################################################################################## - SQLancerTestRelease: - needs: [BuilderDebRelease] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/sqlancer_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=SQLancer (release) - REPO_COPY=${{runner.temp}}/sqlancer_release/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: SQLancer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 sqlancer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - SQLancerTestDebug: - needs: [BuilderDebDebug] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - 
name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/sqlancer_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=SQLancer (debug) - REPO_COPY=${{runner.temp}}/sqlancer_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: SQLancer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 sqlancer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################# -###################################### JEPSEN TESTS ######################################### -############################################################################################# - Jepsen: - # This is special test NOT INCLUDED in FinishCheck - # When it's skipped, all dependent tasks will be skipped too. 
- # DO NOT add it there - if: contains(github.event.pull_request.labels.*.name, 'jepsen-test') - needs: [BuilderBinRelease] - uses: ./.github/workflows/jepsen.yml - FinishCheck: - needs: - - StyleCheck - - DockerHubPush - - DockerServerImages - - CheckLabels - - BuilderReport - - BuilderSpecialReport - - FastTest - - FunctionalStatelessTestDebug0 - - FunctionalStatelessTestDebug1 - - FunctionalStatelessTestDebug2 - - FunctionalStatelessTestDebug3 - - FunctionalStatelessTestDebug4 - - FunctionalStatelessTestRelease - - FunctionalStatelessTestReleaseDatabaseReplicated0 - - FunctionalStatelessTestReleaseDatabaseReplicated1 - - FunctionalStatelessTestReleaseDatabaseReplicated2 - - FunctionalStatelessTestReleaseDatabaseReplicated3 - - FunctionalStatelessTestReleaseWideParts - - FunctionalStatelessTestReleaseAnalyzer - - FunctionalStatelessTestAarch64 - - FunctionalStatelessTestAsan0 - - FunctionalStatelessTestAsan1 - - FunctionalStatelessTestAsan2 - - FunctionalStatelessTestAsan3 - - FunctionalStatelessTestTsan0 - - FunctionalStatelessTestTsan1 - - FunctionalStatelessTestTsan2 - - FunctionalStatelessTestTsan3 - - FunctionalStatelessTestTsan4 - - FunctionalStatelessTestMsan0 - - FunctionalStatelessTestMsan1 - - FunctionalStatelessTestMsan2 - - FunctionalStatelessTestMsan3 - - FunctionalStatelessTestMsan4 - - FunctionalStatelessTestMsan5 - - FunctionalStatelessTestUBsan0 - - FunctionalStatelessTestUBsan1 - - FunctionalStatefulTestDebug - - FunctionalStatefulTestRelease - - FunctionalStatefulTestAarch64 - - FunctionalStatefulTestAsan - - FunctionalStatefulTestTsan - - FunctionalStatefulTestMsan - - FunctionalStatefulTestUBsan - - FunctionalStatelessTestReleaseS3_0 - - FunctionalStatelessTestReleaseS3_1 - - FunctionalStatelessTestS3Debug0 - - FunctionalStatelessTestS3Debug1 - - FunctionalStatelessTestS3Debug2 - - FunctionalStatelessTestS3Debug4 - - FunctionalStatelessTestS3Debug5 - - FunctionalStatelessTestS3Tsan0 - - FunctionalStatelessTestS3Tsan1 - - 
FunctionalStatelessTestS3Tsan2 - - FunctionalStatelessTestS3Tsan4 - - StressTestDebug - - StressTestAsan - - StressTestTsan - - StressTestMsan - - StressTestUBsan - - ASTFuzzerTestDebug - - ASTFuzzerTestAsan - - ASTFuzzerTestTsan - - ASTFuzzerTestMSan - - ASTFuzzerTestUBSan - - IntegrationTestsAsan0 - - IntegrationTestsAsan1 - - IntegrationTestsAsan2 - - IntegrationTestsAsan3 - - IntegrationTestsAsan4 - - IntegrationTestsAsan5 - - IntegrationTestsAnalyzerAsan0 - - IntegrationTestsAnalyzerAsan1 - - IntegrationTestsAnalyzerAsan2 - - IntegrationTestsAnalyzerAsan3 - - IntegrationTestsAnalyzerAsan4 - - IntegrationTestsAnalyzerAsan5 - - IntegrationTestsRelease0 - - IntegrationTestsRelease1 - - IntegrationTestsRelease2 - - IntegrationTestsRelease3 - - IntegrationTestsTsan0 - - IntegrationTestsTsan1 - - IntegrationTestsTsan2 - - IntegrationTestsTsan3 - - IntegrationTestsTsan4 - - IntegrationTestsTsan5 - - PerformanceComparisonX86-0 - - PerformanceComparisonX86-1 - - PerformanceComparisonX86-2 - - PerformanceComparisonX86-3 - - PerformanceComparisonAarch-0 - - PerformanceComparisonAarch-1 - - PerformanceComparisonAarch-2 - - PerformanceComparisonAarch-3 - - UnitTestsAsan - - UnitTestsTsan - - UnitTestsMsan - - UnitTestsUBsan - - UnitTestsReleaseClang - - CompatibilityCheckX86 - - CompatibilityCheckAarch64 - - IntegrationTestsFlakyCheck - - SQLancerTestRelease - - SQLancerTestDebug - runs-on: [self-hosted, style-checker] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Finish label - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 finish_check.py - python3 merge_pr.py --check-approved -############################################################################################## -########################### SQLLOGIC TEST ################################################### -############################################################################################## - SQLLogicTestRelease: - needs: 
[BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/sqllogic_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Sqllogic test (release) - REPO_COPY=${{runner.temp}}/sqllogic_debug/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Sqllogic test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 sqllogic_test.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################## -##################################### SQL TEST ############################################### -############################################################################################## - SQLTest: - needs: [BuilderDebRelease] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/sqltest - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=SQLTest - REPO_COPY=${{runner.temp}}/sqltest/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: SQLTest - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 sqltest.py 
"$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml deleted file mode 100644 index 0742ebfd4490..000000000000 --- a/.github/workflows/release.yml +++ /dev/null @@ -1,65 +0,0 @@ -name: PublishedReleaseCI -# - Gets artifacts from S3 -# - Sends it to JFROG Artifactory -# - Adds them to the release assets - -on: # yamllint disable-line rule:truthy - release: - types: - - published - workflow_dispatch: - inputs: - tag: - description: 'Release tag' - required: true - type: string - -jobs: - ReleasePublish: - runs-on: [self-hosted, style-checker] - steps: - - name: Set tag from input - if: github.event_name == 'workflow_dispatch' - run: | - echo "GITHUB_TAG=${{ github.event.inputs.tag }}" >> "$GITHUB_ENV" - - name: Set tag from REF - if: github.event_name == 'release' - run: | - echo "GITHUB_TAG=${GITHUB_REF#refs/tags/}" >> "$GITHUB_ENV" - - name: Deploy packages and assets - run: | - curl --silent --data '' --no-buffer \ - '${{ secrets.PACKAGES_RELEASE_URL }}/release/'"${GITHUB_TAG}"'?binary=binary_darwin&binary=binary_darwin_aarch64&sync=true' - ############################################################################################ - ##################################### Docker images ####################################### - ############################################################################################ - DockerServerImages: - runs-on: [self-hosted, style-checker] - steps: - - name: Set tag from input - if: github.event_name == 'workflow_dispatch' - run: | - echo "GITHUB_TAG=${{ github.event.inputs.tag }}" >> "$GITHUB_ENV" - - name: Set tag from REF - if: github.event_name == 'release' - run: | - echo "GITHUB_TAG=${GITHUB_REF#refs/tags/}" >> "$GITHUB_ENV" - - name: Check out repository code - uses: 
ClickHouse/checkout@v1 - with: - clear-repository: true - fetch-depth: 0 # otherwise we will have no version info - ref: ${{ env.GITHUB_TAG }} - - name: Check docker clickhouse/clickhouse-server building - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_server.py --release-type auto --version "$GITHUB_TAG" \ - --image-repo clickhouse/clickhouse-server --image-path docker/server - python3 docker_server.py --release-type auto --version "$GITHUB_TAG" \ - --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index fba56339d166..2fde382bfae6 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -3,20 +3,38 @@ name: ReleaseBranchCI env: # Force the stdout and stderr streams to be unbuffered PYTHONUNBUFFERED: 1 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} + REGRESSION_RESULTS_URL: altinity-build-artifacts/${{github.event.number}}/$GITHUB_SHA + REGRESSION_ARM_COMMIT: 19e8624c5e4ccc65b128d27b19836c0570e53991 + on: # yamllint disable-line rule:truthy + pull_request: + types: + - synchronize + - reopened + - opened + branches: + # Anything/23.8 (e.g customizations/23.8.x) + - '**/23.8*' + release: + types: + - published + - prereleased push: branches: - # 22.1 and 22.10 - - '2[1-9].[1-9][0-9]' - - '2[1-9].[1-9]' + - 'releases/23.8**' jobs: DockerHubPushAarch64: - runs-on: [self-hosted, style-checker-aarch64] + runs-on: [self-hosted, style-checker, on-demand, type-cax41, in-fsn1, 
image-arm-app-docker-ce] steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 with: clear-repository: true - name: Images check @@ -29,10 +47,10 @@ jobs: name: changed_images_aarch64 path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json DockerHubPushAmd64: - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, style-checker, on-demand, type-cpx51, image-x86-app-docker-ce] steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 with: clear-repository: true - name: Images check @@ -46,10 +64,10 @@ jobs: path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json DockerHubPush: needs: [DockerHubPushAmd64, DockerHubPushAarch64] - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, style-checker, on-demand, type-cpx41, image-x86-app-docker-ce] steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 with: clear-repository: true fetch-depth: 0 # to find ancestor merge commits necessary for finding proper docker tags @@ -72,9 +90,10 @@ jobs: with: name: changed_images path: ${{ runner.temp }}/changed_images.json - CompatibilityCheckX86: + CompatibilityCheck: needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, style-checker, on-demand, type-cpx41, image-x86-app-docker-ce] + timeout-minutes: 180 steps: - name: Set envs run: | @@ -84,7 +103,7 @@ jobs: REPORTS_PATH=${{runner.temp}}/reports_dir EOF - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 with: clear-repository: true - name: Download json reports @@ -103,43 +122,13 @@ jobs: docker ps --quiet | xargs --no-run-if-empty docker kill ||: docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: 
sudo rm -fr "$TEMP_PATH" - CompatibilityCheckAarch64: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckAarch64 - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" ######################################################################################### #################################### ORDINARY BUILDS #################################### ######################################################################################### BuilderDebRelease: needs: [DockerHubPush] - runs-on: [self-hosted, builder] + runs-on: [self-hosted, builder, on-demand, type-cpx51, image-x86-app-docker-ce] + timeout-minutes: 180 steps: - name: Set envs run: | @@ -149,14 +138,17 @@ jobs: REPO_COPY=${{runner.temp}}/build_check/ClickHouse CACHES_PATH=${{runner.temp}}/../ccaches BUILD_NAME=package_release + CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable EOF - name: Download changed images uses: actions/download-artifact@v3 with: name: changed_images path: ${{ env.IMAGES_PATH }} + - name: Trust My Directory + run: git config --global --add safe.directory * # https://stackoverflow.com/a/71940133 - name: Check out repository code - uses: 
ClickHouse/checkout@v1 + uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 with: clear-repository: true submodules: true @@ -164,8 +156,9 @@ jobs: - name: Build run: | sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" + mkdir -p "$TEMP_PATH/build_check/package_release" + cd .. && tar czf $TEMP_PATH/build_source.src.tar.gz ClickHouse/ + cd $TEMP_PATH && tar xzf $TEMP_PATH/build_source.src.tar.gz cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - name: Upload build URLs to artifacts if: ${{ success() || failure() }} @@ -181,7 +174,7 @@ jobs: sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" BuilderDebAarch64: needs: [DockerHubPush] - runs-on: [self-hosted, builder] + runs-on: [self-hosted, builder, on-demand, type-cax41, image-arm-app-docker-ce] steps: - name: Set envs run: | @@ -191,18 +184,42 @@ jobs: REPO_COPY=${{runner.temp}}/build_check/ClickHouse CACHES_PATH=${{runner.temp}}/../ccaches BUILD_NAME=package_aarch64 + CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable EOF - name: Download changed images uses: actions/download-artifact@v3 with: name: changed_images path: ${{ runner.temp }}/images_path + - name: Trust My Directory + run: git config --global --add safe.directory * # https://stackoverflow.com/a/71940133 - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 with: clear-repository: true submodules: true - fetch-depth: 0 # For a proper version and performance artifacts + - name: Apply sparse checkout for contrib # in order to check that it doesn't break build + run: | + rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' + git -C "$GITHUB_WORKSPACE" checkout . 
&& echo 'restored' + "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' + du -hs "$GITHUB_WORKSPACE/contrib" ||: + find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: + - name: Apply sparse checkout for contrib # in order to check that it doesn't break build + run: | + rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' + git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored' + "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' + du -hs "$GITHUB_WORKSPACE/contrib" ||: + find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: + fetch-depth: 0 # otherwise we will have no info about contributors + - name: Apply sparse checkout for contrib # in order to check that it doesn't break build + run: | + rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' + git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored' + "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' + du -hs "$GITHUB_WORKSPACE/contrib" ||: + find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - name: Build run: | sudo rm -fr "$TEMP_PATH" @@ -220,1421 +237,413 @@ jobs: docker ps --quiet | xargs --no-run-if-empty docker kill ||: docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebAsan: - needs: [DockerHubPush] - runs-on: [self-hosted, builder] +############################################################################################ +##################################### Docker images ####################################### +############################################################################################ + DockerServerImages: + needs: + - BuilderDebRelease + - BuilderDebAarch64 + runs-on: [self-hosted, style-checker, on-demand, type-cpx51, image-x86-app-docker-ce] + timeout-minutes: 180 + steps: + - name: Check out repository code + uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 + with: + clear-repository: true + fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the 
job itself + - name: Check docker altinityinfra/clickhouse-server building + run: | + cd "$GITHUB_WORKSPACE/tests/ci" + python3 docker_server.py --release-type head --no-push \ + --image-repo altinityinfra/clickhouse-server --image-path docker/server + python3 docker_server.py --release-type head --no-push \ + --image-repo altinityinfra/clickhouse-keeper --image-path docker/keeper + - name: Cleanup + if: always() + run: | + docker ps --quiet | xargs --no-run-if-empty docker kill ||: + docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: + sudo rm -fr "$TEMP_PATH" +############################################################################################ +##################################### BUILD REPORTER ####################################### +############################################################################################ + BuilderReport: + needs: + - BuilderDebRelease + - BuilderDebAarch64 + runs-on: [self-hosted, style-checker, on-demand, type-cpx31, image-x86-app-docker-ce] + timeout-minutes: 180 + if: ${{ success() || failure() }} steps: - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_asan + CHECK_NAME=ClickHouse build check + REPORTS_PATH=${{runner.temp}}/reports_dir + REPORTS_PATH=${{runner.temp}}/reports_dir + TEMP_PATH=${{runner.temp}}/report_check + NEEDS_DATA_PATH=${{runner.temp}}/needs.json EOF - - name: Download changed images + - name: Download json reports uses: actions/download-artifact@v3 with: - name: changed_images - path: ${{ env.IMAGES_PATH }} + path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 with: clear-repository: true - submodules: true - - name: Build + - name: Report Builder run: | sudo rm -fr 
"$TEMP_PATH" mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json + cat > "$NEEDS_DATA_PATH" << 'EOF' + ${{ toJSON(needs) }} + EOF + cd "$GITHUB_WORKSPACE/tests/ci" + python3 build_report_check.py "$CHECK_NAME" - name: Cleanup if: always() run: | docker ps --quiet | xargs --no-run-if-empty docker kill ||: docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebUBsan: - needs: [DockerHubPush] - runs-on: [self-hosted, builder] + sudo rm -fr "$TEMP_PATH" + # BuilderSpecialReport: + # needs: + # # - BuilderBinDarwin + # - BuilderBinDarwinAarch64 + # runs-on: [self-hosted, style-checker] + # if: ${{ success() || failure() }} + # steps: + # - name: Set envs + # run: | + # cat >> "$GITHUB_ENV" << 'EOF' + # TEMP_PATH=${{runner.temp}}/report_check + # REPORTS_PATH=${{runner.temp}}/reports_dir + # CHECK_NAME=ClickHouse special build check + # NEEDS_DATA_PATH=${{runner.temp}}/needs.json + # EOF + # - name: Download json reports + # uses: actions/download-artifact@v3 + # with: + # path: ${{ env.REPORTS_PATH }} + # - name: Check out repository code + # uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 + # with: + # clear-repository: true + # - name: Report Builder + # run: | + # sudo rm -fr "$TEMP_PATH" + # mkdir -p "$TEMP_PATH" + # cat > "$NEEDS_DATA_PATH" << 'EOF' + # ${{ toJSON(needs) }} + # EOF + # cd "$GITHUB_WORKSPACE/tests/ci" + # python3 build_report_check.py "$CHECK_NAME" + # - name: Cleanup + # if: always() + # run: | + # # shellcheck disable=SC2046 + # docker kill $(docker ps -q) ||: + # # shellcheck disable=SC2046 + # docker rm -f $(docker ps -a -q) ||: + # sudo rm -fr "$TEMP_PATH" + MarkReleaseReady: + needs: 
+ # - BuilderBinDarwin + # - BuilderBinDarwinAarch64 + - BuilderDebRelease + - BuilderDebAarch64 + - SignRelease + runs-on: [self-hosted, style-checker, on-demand, type-cpx31, image-x86-app-docker-ce] + timeout-minutes: 180 + steps: + - name: Check out repository code + uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 + with: + clear-repository: true + - name: Mark Commit Release Ready + run: | + cd "$GITHUB_WORKSPACE/tests/ci" + python3 mark_release_ready.py +############################################################################################ +#################################### INSTALL PACKAGES ###################################### +############################################################################################ + InstallPackagesTestRelease: + needs: [SignRelease] + runs-on: [self-hosted, style-checker, on-demand, type-cpx51, image-x86-app-docker-ce] + timeout-minutes: 180 steps: + - name: Setup + run: | + sudo touch /etc/docker/daemon.json + sudo chown ubuntu:ubuntu /etc/docker/daemon.json + sudo cat < /etc/docker/daemon.json + { + "ipv6": true, + "fixed-cidr-v6": "2001:3984:3989::/64" + } + EOT + sudo chown root:root /etc/docker/daemon.json + sudo systemctl restart docker - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_ubsan + TEMP_PATH=${{runner.temp}}/test_install + REPORTS_PATH=${{runner.temp}}/reports_dir + CHECK_NAME=Install packages (amd64) + REPO_COPY=${{runner.temp}}/test_install/ClickHouse EOF - - name: Download changed images + - name: Download json reports uses: actions/download-artifact@v3 with: - name: changed_images - path: ${{ env.IMAGES_PATH }} + path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 
with: clear-repository: true - submodules: true - - name: Build + - name: Test packages installation run: | sudo rm -fr "$TEMP_PATH" mkdir -p "$TEMP_PATH" cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" - name: Cleanup if: always() run: | docker ps --quiet | xargs --no-run-if-empty docker kill ||: docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebTsan: - needs: [DockerHubPush] - runs-on: [self-hosted, builder] + sudo rm -fr "$TEMP_PATH" + # InstallPackagesTestAarch64: + # needs: [BuilderDebAarch64] + # runs-on: [self-hosted, style-checker-aarch64] + # steps: + # - name: Set envs + # run: | + # cat >> "$GITHUB_ENV" << 'EOF' + # TEMP_PATH=${{runner.temp}}/test_install + # REPORTS_PATH=${{runner.temp}}/reports_dir + # CHECK_NAME=Install packages (arm64) + # REPO_COPY=${{runner.temp}}/test_install/ClickHouse + # EOF + # - name: Download json reports + # uses: actions/download-artifact@v3 + # with: + # path: ${{ env.REPORTS_PATH }} + # - name: Check out repository code + # uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 + # with: + # clear-repository: true + # - name: Test packages installation + # run: | + # sudo rm -fr "$TEMP_PATH" + # mkdir -p "$TEMP_PATH" + # cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" + # cd "$REPO_COPY/tests/ci" + # python3 install_check.py "$CHECK_NAME" + # - name: Cleanup + # if: always() + # run: | + # docker ps --quiet | xargs --no-run-if-empty docker kill ||: + # docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: + # sudo rm -fr "$TEMP_PATH" + + tests_start: + ## Do-nothing stage to trigger tests, makes is easier to + 
needs: [InstallPackagesTestRelease] + runs-on: ubuntu-latest + timeout-minutes: 180 + steps: + - run: true +############################################################################################## +########################### FUNCTIONAl STATELESS TESTS ####################################### +############################################################################################## + FunctionalStatelessTestRelease: + needs: [BuilderDebRelease] + runs-on: [self-hosted, func-tester, on-demand, type-cpx51, image-x86-snapshot-docker_ipv6_x86] steps: + - name: Setup + run: | + sudo touch /etc/docker/daemon.json + sudo chown ubuntu:ubuntu /etc/docker/daemon.json + sudo cat < /etc/docker/daemon.json + { + "ipv6": true, + "fixed-cidr-v6": "2001:3984:3989::/64" + } + EOT + sudo chown root:root /etc/docker/daemon.json + sudo systemctl restart docker - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_tsan + TEMP_PATH=${{runner.temp}}/stateless_debug + REPORTS_PATH=${{runner.temp}}/reports_dir + CHECK_NAME=Stateless tests (release) + REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse + KILL_TIMEOUT=10800 EOF - - name: Download changed images + - name: Download json reports uses: actions/download-artifact@v3 with: - name: changed_images - path: ${{ env.IMAGES_PATH }} + path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 with: clear-repository: true - submodules: true - - name: Build + - name: Functional test run: | sudo rm -fr "$TEMP_PATH" mkdir -p "$TEMP_PATH" cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: 
actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - name: Cleanup if: always() run: | docker ps --quiet | xargs --no-run-if-empty docker kill ||: docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebMsan: - needs: [DockerHubPush] - runs-on: [self-hosted, builder] + sudo rm -fr "$TEMP_PATH" + + + FunctionalStatelessTestAarch64: + needs: [BuilderDebAarch64] + runs-on: [self-hosted, func-tester, on-demand, type-cax41, image-arm-snapshot-docker_ipv6_arm] steps: + - name: Setup + run: | + sudo touch /etc/docker/daemon.json + sudo chown ubuntu:ubuntu /etc/docker/daemon.json + sudo cat < /etc/docker/daemon.json + { + "ipv6": true, + "fixed-cidr-v6": "2001:3984:3989::/64" + } + EOT + sudo chown root:root /etc/docker/daemon.json + sudo systemctl restart docker - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_msan + TEMP_PATH=${{runner.temp}}/stateless_release + REPORTS_PATH=${{runner.temp}}/reports_dir + CHECK_NAME=Stateless tests (aarch64) + REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse + KILL_TIMEOUT=10800 EOF - - name: Download changed images + - name: Download json reports uses: actions/download-artifact@v3 with: - name: changed_images - path: ${{ env.IMAGES_PATH }} + path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 with: clear-repository: true - submodules: true - - name: Build + - name: Functional test run: | sudo rm -fr "$TEMP_PATH" mkdir -p "$TEMP_PATH" cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" 
&& python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - name: Cleanup if: always() run: | docker ps --quiet | xargs --no-run-if-empty docker kill ||: docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderDebDebug: - needs: [DockerHubPush] - runs-on: [self-hosted, builder] + sudo rm -fr "$TEMP_PATH" + + +############################################################################################## +############################ FUNCTIONAl STATEFUL TESTS ####################################### +############################################################################################## + FunctionalStatefulTestRelease: + needs: [BuilderDebRelease] + runs-on: [self-hosted, func-tester, on-demand, type-cpx51, image-x86-snapshot-docker_ipv6_x86] steps: + - name: Setup + run: | + sudo touch /etc/docker/daemon.json + sudo chown ubuntu:ubuntu /etc/docker/daemon.json + sudo cat <<EOT > /etc/docker/daemon.json + { + "ipv6": true, + "fixed-cidr-v6": "2001:3984:3989::/64" + } + EOT + sudo chown root:root /etc/docker/daemon.json + sudo systemctl restart docker - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_debug + TEMP_PATH=${{runner.temp}}/stateful_debug + REPORTS_PATH=${{runner.temp}}/reports_dir + CHECK_NAME=Stateful tests (release) + REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse + KILL_TIMEOUT=3600 EOF - - name: Download changed images + - name: Download json reports uses: actions/download-artifact@v3 with: - name: changed_images 
- path: ${{ env.IMAGES_PATH }} + path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 with: clear-repository: true - submodules: true - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderBinDarwin: - needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_darwin - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - 
run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" - BuilderBinDarwinAarch64: - needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_darwin_aarch64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . 
&& echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" -############################################################################################ -##################################### Docker images ####################################### -############################################################################################ - DockerServerImages: - needs: - - BuilderDebRelease - - BuilderDebAarch64 - runs-on: [self-hosted, style-checker] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself - - name: Check docker clickhouse/clickhouse-server building - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_server.py --release-type head --no-push \ - --image-repo clickhouse/clickhouse-server --image-path docker/server - python3 docker_server.py --release-type head --no-push \ - --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" 
-############################################################################################ -##################################### BUILD REPORTER ####################################### -############################################################################################ - BuilderReport: - needs: - - BuilderDebRelease - - BuilderDebAarch64 - - BuilderDebAsan - - BuilderDebTsan - - BuilderDebUBsan - - BuilderDebMsan - - BuilderDebDebug - runs-on: [self-hosted, style-checker] - if: ${{ success() || failure() }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - CHECK_NAME=ClickHouse build check - REPORTS_PATH=${{runner.temp}}/reports_dir - REPORTS_PATH=${{runner.temp}}/reports_dir - TEMP_PATH=${{runner.temp}}/report_check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - BuilderSpecialReport: - needs: - - BuilderBinDarwin - - BuilderBinDarwinAarch64 - runs-on: [self-hosted, style-checker] - if: ${{ success() || failure() }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/report_check - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=ClickHouse special build check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository 
code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - MarkReleaseReady: - needs: - - BuilderBinDarwin - - BuilderBinDarwinAarch64 - - BuilderDebRelease - - BuilderDebAarch64 - runs-on: [self-hosted, style-checker] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Mark Commit Release Ready - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 mark_release_ready.py -############################################################################################ -#################################### INSTALL PACKAGES ###################################### -############################################################################################ - InstallPackagesTestRelease: - needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (amd64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs 
--no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - InstallPackagesTestAarch64: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (arm64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################## -########################### FUNCTIONAl STATELESS TESTS ####################################### -############################################################################################## - FunctionalStatelessTestRelease: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: 
Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAarch64: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (aarch64) - REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan0: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: 
ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan1: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan0: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 
- with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - 
RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestUBsan: - needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (ubsan) - REPO_COPY=${{runner.temp}}/stateless_ubsan/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan0: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - 
REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan1: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan2: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - 
TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug0: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug1: - needs: [BuilderDebDebug] - runs-on: 
[self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug2: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - 
sudo rm -fr "$TEMP_PATH" -############################################################################################## -############################ FUNCTIONAl STATEFUL TESTS ####################################### -############################################################################################## - FunctionalStatefulTestRelease: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (release) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestAarch64: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (aarch64) - REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" 
- cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestAsan: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (asan) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestTsan: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (tsan) - REPO_COPY=${{runner.temp}}/stateful_tsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r 
"$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestMsan: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (msan) - REPO_COPY=${{runner.temp}}/stateful_msan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestUBsan: - needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (ubsan) - REPO_COPY=${{runner.temp}}/stateful_ubsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r 
"$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatefulTestDebug: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (debug) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################## -######################################### STRESS TESTS ####################################### -############################################################################################## - StressTestAsan: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (asan) - REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse - EOF - - name: Download json reports - uses: 
actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - StressTestTsan: - needs: [BuilderDebTsan] - # func testers have 16 cores + 128 GB memory - # while stress testers have 36 cores + 72 memory - # It would be better to have something like 32 + 128, - # but such servers almost unavailable as spot instances. - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (tsan) - REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - StressTestMsan: - needs: [BuilderDebMsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (msan) - 
REPO_COPY=${{runner.temp}}/stress_memory/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - StressTestUBsan: - needs: [BuilderDebUBsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_undefined - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (ubsan) - REPO_COPY=${{runner.temp}}/stress_undefined/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test + - name: Functional test run: | sudo rm -fr "$TEMP_PATH" mkdir -p "$TEMP_PATH" cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - name: Cleanup if: always() run: | docker ps --quiet | xargs --no-run-if-empty docker kill ||: docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: sudo rm -fr "$TEMP_PATH" - StressTestDebug: - needs: [BuilderDebDebug] - runs-on: [self-hosted, stress-tester] + FunctionalStatefulTestAarch64: + needs: [BuilderDebAarch64] + runs-on: [self-hosted, func-tester, on-demand, type-cax41, image-arm-snapshot-docker_ipv6_arm] steps: + - name: Setup + run: | + sudo touch /etc/docker/daemon.json + sudo chown 
ubuntu:ubuntu /etc/docker/daemon.json + sudo cat <<EOT > /etc/docker/daemon.json + { + "ipv6": true, + "fixed-cidr-v6": "2001:3984:3989::/64" + } + EOT + sudo chown root:root /etc/docker/daemon.json + sudo systemctl restart docker - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_debug + TEMP_PATH=${{runner.temp}}/stateful_release REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (debug) - REPO_COPY=${{runner.temp}}/stress_debug/ClickHouse + CHECK_NAME=Stateful tests (aarch64) + REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse + KILL_TIMEOUT=3600 EOF - name: Download json reports uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 with: clear-repository: true - - name: Stress test + - name: Functional test run: | sudo rm -fr "$TEMP_PATH" mkdir -p "$TEMP_PATH" cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - name: Cleanup if: always() run: | @@ -1644,26 +653,26 @@ jobs: ############################################################################################# ############################# INTEGRATION TESTS ############################################# ############################################################################################# - IntegrationTestsAsan0: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] + IntegrationTestsRelease0: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester, func-tester] steps: - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan + TEMP_PATH=${{runner.temp}}/integration_tests_release REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - 
REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse + CHECK_NAME=Integration tests (release) + REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=3 + RUN_BY_HASH_TOTAL=2 EOF - name: Download json reports uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 with: clear-repository: true - name: Integration test @@ -1679,26 +688,26 @@ jobs: docker ps --quiet | xargs --no-run-if-empty docker kill ||: docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan1: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] + IntegrationTestsRelease1: + needs: [BuilderDebRelease] + runs-on: [self-hosted, stress-tester, func-tester] steps: - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan + TEMP_PATH=${{runner.temp}}/integration_tests_release REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse + CHECK_NAME=Integration tests (release) + REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=3 + RUN_BY_HASH_TOTAL=2 EOF - name: Download json reports uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 with: clear-repository: true - name: Integration test @@ -1714,299 +723,951 @@ jobs: docker ps --quiet | xargs --no-run-if-empty docker kill ||: docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] 
+############################################################################################# +##################################### REGRESSION TESTS ###################################### +############################################################################################# + RegressionStart: + ## Not depending on the tests above since they can fail at any given moment. + needs: [BuilderDebRelease, BuilderDebAarch64] + runs-on: ubuntu-latest + steps: + - run: true + + RegressionCommonAmd64: + strategy: + fail-fast: false + matrix: + SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, data_types, datetime64_extended_range, disk_level_encryption, dns, example, extended_precision_data_types, kafka, kerberos, key_value, lightweight_delete, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] + needs: [RegressionStart] + runs-on: [self-hosted, regression-tester, on-demand, type-cpx51, image-x86-app-docker-ce] + timeout-minutes: 180 + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=3 + SUITE=${{ matrix.SUITE }} + artifacts=public EOF - name: Download json reports uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} 
--github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + env: + artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA + - uses: actions/upload-artifact@v3 + if: always() with: - clear-repository: true - - name: Integration test + name: ${{ env.SUITE }}-amd64-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + + RegressionCommonAarch64: + strategy: + fail-fast: false + matrix: + SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, key_value, lightweight_delete, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] + needs: [RegressionStart] + runs-on: [self-hosted, regression-tester, on-demand, type-cax41, image-arm-app-docker-ce] + timeout-minutes: 180 + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: 
Altinity/clickhouse-regression + ref: ${{ env.REGRESSION_ARM_COMMIT }} + - name: Set envs run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=${{ matrix.SUITE }} + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --log raw.log + - name: Create and upload logs if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan0: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] + run: .github/create_and_upload_logs.sh 1 + env: + artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-aarch64-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + 
./*/*/_instances/*.log + + RegressionBenchmarkAmd64: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3, gcs] + needs: [RegressionStart] + runs-on: [self-hosted, regression-tester, on-demand, type-cpx51, image-x86-app-docker-ce] + timeout-minutes: 180 + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 + SUITE=ontime_benchmark + STORAGE=/${{ matrix.STORAGE }} + artifacts=public EOF - name: Download json reports uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/benchmark.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --storage ${{ matrix.STORAGE }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" 
project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + env: + artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA + - uses: actions/upload-artifact@v3 + if: always() with: - clear-repository: true - - name: Integration test + name: benchmark-${{ matrix.STORAGE }}-amd64-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + + RegressionBenchmarkAarch64: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3, gcs] + needs: [RegressionStart] + runs-on: [self-hosted, regression-tester, on-demand, type-cax41, image-arm-app-docker-ce] + timeout-minutes: 180 + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ env.REGRESSION_ARM_COMMIT }} + - name: Set envs run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=ontime_benchmark + STORAGE=/${{ matrix.STORAGE }} + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url 
+ run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/benchmark.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --storage ${{ matrix.STORAGE }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --log raw.log + - name: Create and upload logs if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] + run: .github/create_and_upload_logs.sh 1 + env: + artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA + - uses: actions/upload-artifact@v3 + if: always() + with: + name: benchmark-${{ matrix.STORAGE }}-aarch64-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + + RegressionClickHouseKeeperSSLAmd64: + needs: [RegressionStart] + runs-on: [self-hosted, regression-tester, 
on-demand, type-cpx51, image-x86-app-docker-ce] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ env.REGRESSION_COMMON_COMMIT }} - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 + SUITE=clickhouse_keeper + STORAGE=/ssl + artifacts=builds EOF - name: Download json reports uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --ssl + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() with: - clear-repository: true - - name: Integration test + name: ${{ env.SUITE }}-ssl-amd64-artifacts + 
path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + + RegressionClickHouseKeeperSSLAarch64: + needs: [RegressionStart] + runs-on: [self-hosted, regression-tester, on-demand, type-cax41, image-arm-app-docker-ce] + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ env.REGRESSION_COMMON_COMMIT }} + - name: Set envs run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=clickhouse_keeper + STORAGE=/ssl + artifacts=builds + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --ssl + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" + --log raw.log + - name: Create and 
upload logs if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-ssl-aarch64-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + + RegressionLDAPAmd64: + strategy: + fail-fast: false + matrix: + SUITE: [authentication, external_user_directory, role_mapping] + needs: [RegressionStart] + runs-on: [self-hosted, regression-tester, on-demand, type-cpx51, image-x86-app-docker-ce] + timeout-minutes: 180 + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 + SUITE=ldap/${{ matrix.SUITE }} + artifacts=public EOF - name: Download json reports uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ 
env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --log raw.log + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + env: + artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA + - uses: actions/upload-artifact@v3 + if: always() with: - clear-repository: true - - name: Integration test + name: ldap-${{ matrix.SUITE }}-amd64-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + + RegressionLDAPAarch64: + strategy: + fail-fast: false + matrix: + SUITE: [authentication, external_user_directory, role_mapping] + needs: [RegressionStart] + runs-on: [self-hosted, regression-tester, on-demand, type-cax41, image-arm-app-docker-ce] + timeout-minutes: 180 + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ env.REGRESSION_ARM_COMMIT }} + - name: Set envs run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=ldap/${{ matrix.SUITE }} + artifacts=public + EOF + - name: Download 
json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --log raw.log + - name: Create and upload logs if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan3: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] + run: .github/create_and_upload_logs.sh 1 + env: + artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ldap-${{ matrix.SUITE }}-aarch64-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + + RegressionParquetAmd64: + needs: [RegressionStart] + runs-on: [self-hosted, regression-tester, on-demand, type-cpx51, image-x86-app-docker-ce] + timeout-minutes: 180 + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} steps: + - name: Checkout regression repo + uses: 
actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 + SUITE=parquet + artifacts=public EOF - name: Download json reports uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --log raw.log + --storage minio + --storage aws_s3 + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --storage gcs + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + env: + artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA + - uses: 
actions/upload-artifact@v3 + if: always() with: - clear-repository: true - - name: Integration test + name: ${{ env.SUITE }}-amd64-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + + RegressionS3Amd64: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3, gcs] + needs: [RegressionStart] + runs-on: [self-hosted, regression-tester, on-demand, type-cpx51, image-x86-app-docker-ce] + timeout-minutes: 180 + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=s3 + STORAGE=/${{ matrix.STORAGE }} + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" 
job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --log raw.log + --storage ${{ matrix.STORAGE }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + - name: Create and upload logs if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease0: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] + run: .github/create_and_upload_logs.sh 1 + env: + artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-amd64-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + + RegressionS3Aarch64: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3, gcs] + needs: [RegressionStart] + runs-on: [self-hosted, regression-tester, on-demand, type-cax41, image-arm-app-docker-ce] + timeout-minutes: 180 + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ env.REGRESSION_ARM_COMMIT }} - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release 
REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 + SUITE=s3 + STORAGE=/${{ matrix.STORAGE }} + artifacts=public EOF - name: Download json reports uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --log raw.log + --storage ${{ matrix.STORAGE }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + env: + artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA + - uses: actions/upload-artifact@v3 + if: always() with: - clear-repository: true - - name: Integration test + name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-aarch64-artifacts 
+ path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + + RegressionTieredStorageS3Amd64: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, s3amazon, s3gcs] + needs: [RegressionStart] + runs-on: [self-hosted, regression-tester, on-demand, type-cpx51, image-x86-app-docker-ce] + timeout-minutes: 180 + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=tiered_storage + STORAGE=/${{ matrix.STORAGE }} + artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname 
-i)" + --log raw.log + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/ + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --with-${{ matrix.STORAGE }} + - name: Create and upload logs if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] + run: .github/create_and_upload_logs.sh 1 + env: + artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-amd64-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + + RegressionTieredStorageS3Aarch64: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, s3amazon, s3gcs] + needs: [RegressionStart] + runs-on: [self-hosted, regression-tester, on-demand, type-cax41, image-arm-app-docker-ce] + timeout-minutes: 180 + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ env.REGRESSION_ARM_COMMIT }} - name: Set envs run: | cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - 
REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 + SUITE=tiered_storage + STORAGE=/${{ matrix.STORAGE }} + artifacts=public EOF - name: Download json reports uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --test-to-end + --local + --collect-service-logs + --output classic + --parallel 1 + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --log raw.log + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/ + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --with-${{ matrix.STORAGE }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + env: + artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-aarch64-artifacts + path: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + + 
SignRelease: + needs: [BuilderDebRelease] + runs-on: [self-hosted, on-demand, type-cpx41, image-x86-app-docker-ce] + timeout-minutes: 180 + steps: + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + TEMP_PATH=${{runner.temp}}/signed + REPORTS_PATH=${{runner.temp}}/reports_dir + EOF + - name: Clear repository + run: | + sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: actions/checkout@v2 + - name: Download json reports + uses: actions/download-artifact@v2 with: - clear-repository: true - - name: Integration test + path: ${{ env.REPORTS_PATH }} + - name: Sign release + env: + GPG_BINARY_SIGNING_KEY: ${{ secrets.GPG_BINARY_SIGNING_KEY }} + GPG_BINARY_SIGNING_PASSPHRASE: ${{ secrets.GPG_BINARY_SIGNING_PASSPHRASE }} + REPORTS_PATH: ${{ env.REPORTS_PATH }} run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" + cd "$GITHUB_WORKSPACE/tests/ci" + python3 sign_release.py + - name: Upload signed hashes + uses: actions/upload-artifact@v2 + with: + name: signed-hashes + path: ${{ env.TEMP_PATH }}/*.gpg - name: Cleanup if: always() run: | docker ps --quiet | xargs --no-run-if-empty docker kill ||: docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: sudo rm -fr "$TEMP_PATH" + ########################################################################################### + ################################ FINISH CHECK ############################################# + ########################################################################################### FinishCheck: needs: - DockerHubPush - DockerServerImages - BuilderReport - - BuilderSpecialReport + # - BuilderSpecialReport - MarkReleaseReady - - FunctionalStatelessTestDebug0 - - FunctionalStatelessTestDebug1 - - FunctionalStatelessTestDebug2 - FunctionalStatelessTestRelease - 
FunctionalStatelessTestAarch64 - - FunctionalStatelessTestAsan0 - - FunctionalStatelessTestAsan1 - - FunctionalStatelessTestTsan0 - - FunctionalStatelessTestTsan1 - - FunctionalStatelessTestTsan2 - - FunctionalStatelessTestMsan0 - - FunctionalStatelessTestMsan1 - - FunctionalStatelessTestMsan2 - - FunctionalStatelessTestUBsan - - FunctionalStatefulTestDebug - FunctionalStatefulTestRelease - FunctionalStatefulTestAarch64 - - FunctionalStatefulTestAsan - - FunctionalStatefulTestTsan - - FunctionalStatefulTestMsan - - FunctionalStatefulTestUBsan - - StressTestDebug - - StressTestAsan - - StressTestTsan - - StressTestMsan - - StressTestUBsan - - IntegrationTestsAsan0 - - IntegrationTestsAsan1 - - IntegrationTestsAsan2 - IntegrationTestsRelease0 - IntegrationTestsRelease1 - - IntegrationTestsTsan0 - - IntegrationTestsTsan1 - - IntegrationTestsTsan2 - - IntegrationTestsTsan3 - - CompatibilityCheckX86 - - CompatibilityCheckAarch64 - runs-on: [self-hosted, style-checker] + - CompatibilityCheck + - RegressionCommonAmd64 + - RegressionCommonAarch64 + - RegressionBenchmarkAmd64 + - RegressionBenchmarkAarch64 + - RegressionClickHouseKeeperSSLAmd64 + - RegressionClickHouseKeeperSSLAarch64 + - RegressionLDAPAmd64 + - RegressionLDAPAarch64 + - RegressionParquetAmd64 + - RegressionS3Amd64 + - RegressionS3Aarch64 + - RegressionTieredStorageS3Amd64 + - RegressionTieredStorageS3Aarch64 + - SignRelease + runs-on: [self-hosted, style-checker, on-demand, type-cpx31, image-x86-app-docker-ce] steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 with: clear-repository: true - name: Finish label diff --git a/.github/workflows/tags_stable.yml b/.github/workflows/tags_stable.yml deleted file mode 100644 index f5b42e9c882a..000000000000 --- a/.github/workflows/tags_stable.yml +++ /dev/null @@ -1,72 +0,0 @@ -name: TagsStableWorkflow -# - Gets artifacts from S3 -# - Sends it to JFROG Artifactory -# - Adds them 
to the release assets - -env: - # Force the stdout and stderr streams to be unbuffered - PYTHONUNBUFFERED: 1 - -on: # yamllint disable-line rule:truthy - push: - tags: - - 'v*-prestable' - - 'v*-stable' - - 'v*-lts' - workflow_dispatch: - inputs: - tag: - description: 'Test tag' - required: true - type: string - - -jobs: - UpdateVersions: - runs-on: [self-hosted, style-checker] - steps: - - name: Set test tag - if: github.event_name == 'workflow_dispatch' - run: | - echo "GITHUB_TAG=${{ github.event.inputs.tag }}" >> "$GITHUB_ENV" - - name: Get tag name - if: github.event_name != 'workflow_dispatch' - run: | - echo "GITHUB_TAG=${GITHUB_REF#refs/tags/}" >> "$GITHUB_ENV" - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - ref: master - fetch-depth: 0 - - name: Update versions, docker version, changelog, security - env: - GITHUB_TOKEN: ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }} - run: | - ./utils/list-versions/list-versions.sh > ./utils/list-versions/version_date.tsv - ./utils/list-versions/update-docker-version.sh - GID=$(id -g "${UID}") - docker run -u "${UID}:${GID}" -e PYTHONUNBUFFERED=1 \ - --volume="${GITHUB_WORKSPACE}:/ClickHouse" clickhouse/style-test \ - /ClickHouse/utils/changelog/changelog.py -v --debug-helpers \ - --gh-user-or-token="$GITHUB_TOKEN" --jobs=5 \ - --output="/ClickHouse/docs/changelogs/${GITHUB_TAG}.md" "${GITHUB_TAG}" - git add "./docs/changelogs/${GITHUB_TAG}.md" - python3 ./utils/security-generator/generate_security.py > SECURITY.md - git diff HEAD - - name: Create Pull Request - uses: peter-evans/create-pull-request@v3 - with: - author: "robot-clickhouse " - token: ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }} - committer: "robot-clickhouse " - commit-message: Update version_date.tsv and changelogs after ${{ env.GITHUB_TAG }} - branch: auto/${{ env.GITHUB_TAG }} - assignees: ${{ github.event.sender.login }} # assign the PR to the tag pusher - delete-branch: true - title: Update version_date.tsv and changelogs 
after ${{ env.GITHUB_TAG }} - labels: do not test - body: | - Update version_date.tsv and changelogs after ${{ env.GITHUB_TAG }} - - ### Changelog category (leave one): - - Not for changelog (changelog entry is not required) diff --git a/.github/workflows/woboq.yml b/.github/workflows/woboq.yml deleted file mode 100644 index 1ef729af30a4..000000000000 --- a/.github/workflows/woboq.yml +++ /dev/null @@ -1,44 +0,0 @@ -name: WoboqBuilder -env: - # Force the stdout and stderr streams to be unbuffered - PYTHONUNBUFFERED: 1 - -concurrency: - group: woboq -on: # yamllint disable-line rule:truthy - workflow_dispatch: - workflow_call: -jobs: - # don't use dockerhub push because this image updates so rarely - WoboqCodebrowser: - runs-on: [self-hosted, style-checker] - timeout-minutes: 420 # the task is pretty heavy, so there's an additional hour - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/codebrowser - REPO_COPY=${{runner.temp}}/codebrowser/ClickHouse - IMAGES_PATH=${{runner.temp}}/images_path - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: 'true' - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.IMAGES_PATH }} - - name: Codebrowser - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 codebrowser_check.py - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" diff --git a/cmake/autogenerated_versions.txt b/cmake/autogenerated_versions.txt index b325ee18fa3f..0b335c80fcbe 100644 --- a/cmake/autogenerated_versions.txt +++ b/cmake/autogenerated_versions.txt @@ -7,6 +7,10 @@ SET(VERSION_MAJOR 23) SET(VERSION_MINOR 8) SET(VERSION_PATCH 4) SET(VERSION_GITHASH 
ebe4eb3d23e3f0505c33262966e50d6241506774) -SET(VERSION_DESCRIBE v23.8.4.1-lts) -SET(VERSION_STRING 23.8.4.1) + +SET(VERSION_TWEAK 70) +SET(VERSION_FLAVOUR altinitystable) + +SET(VERSION_DESCRIBE v23.8.4.70.altinitystable) +SET(VERSION_STRING 23.8.4.70.altinitystable) # end of autochange diff --git a/cmake/version.cmake b/cmake/version.cmake index 9ca21556f4d4..06fb783b88f2 100644 --- a/cmake/version.cmake +++ b/cmake/version.cmake @@ -19,5 +19,5 @@ set (VERSION_STRING_SHORT "${VERSION_MAJOR}.${VERSION_MINOR}") math (EXPR VERSION_INTEGER "${VERSION_PATCH} + ${VERSION_MINOR}*1000 + ${VERSION_MAJOR}*1000000") if(CLICKHOUSE_OFFICIAL_BUILD) - set(VERSION_OFFICIAL " (official build)") + set(VERSION_OFFICIAL " (altinity build)") endif() diff --git a/docker/images.json b/docker/images.json index d895e2da2f03..5242ab82fc44 100644 --- a/docker/images.json +++ b/docker/images.json @@ -1,32 +1,30 @@ { "docker/packager/binary": { - "name": "clickhouse/binary-builder", - "dependent": [ - "docker/test/codebrowser" - ] + "name": "altinityinfra/binary-builder", + "dependent": [] }, "docker/test/compatibility/centos": { - "name": "clickhouse/test-old-centos", + "name": "altinityinfra/test-old-centos", "dependent": [] }, "docker/test/compatibility/ubuntu": { - "name": "clickhouse/test-old-ubuntu", + "name": "altinityinfra/test-old-ubuntu", "dependent": [] }, "docker/test/integration/base": { - "name": "clickhouse/integration-test", + "name": "altinityinfra/integration-test", "dependent": [] }, "docker/test/fuzzer": { - "name": "clickhouse/fuzzer", + "name": "altinityinfra/fuzzer", "dependent": [] }, "docker/test/performance-comparison": { - "name": "clickhouse/performance-comparison", + "name": "altinityinfra/performance-comparison", "dependent": [] }, "docker/test/util": { - "name": "clickhouse/test-util", + "name": "altinityinfra/test-util", "dependent": [ "docker/packager/binary", "docker/test/base", @@ -34,140 +32,119 @@ ] }, "docker/test/stateless": { - "name": 
"clickhouse/stateless-test", + "name": "altinityinfra/stateless-test", "dependent": [ "docker/test/stateful", "docker/test/unit" ] }, "docker/test/stateful": { - "name": "clickhouse/stateful-test", + "name": "altinityinfra/stateful-test", "dependent": [ "docker/test/stress", "docker/test/upgrade" ] }, "docker/test/unit": { - "name": "clickhouse/unit-test", + "name": "altinityinfra/unit-test", "dependent": [] }, "docker/test/stress": { - "name": "clickhouse/stress-test", + "name": "altinityinfra/stress-test", "dependent": [] }, "docker/test/upgrade": { - "name": "clickhouse/upgrade-check", - "dependent": [] - }, - "docker/test/codebrowser": { - "name": "clickhouse/codebrowser", + "name": "altinityinfra/upgrade-check", "dependent": [] }, "docker/test/integration/runner": { "only_amd64": true, - "name": "clickhouse/integration-tests-runner", + "name": "altinityinfra/integration-tests-runner", "dependent": [] }, "docker/test/fasttest": { - "name": "clickhouse/fasttest", + "name": "altinityinfra/fasttest", "dependent": [] }, "docker/test/style": { - "name": "clickhouse/style-test", + "name": "altinityinfra/style-test", "dependent": [] }, "docker/test/integration/s3_proxy": { - "name": "clickhouse/s3-proxy", + "name": "altinityinfra/s3-proxy", "dependent": [] }, "docker/test/integration/resolver": { - "name": "clickhouse/python-bottle", + "name": "altinityinfra/python-bottle", "dependent": [] }, "docker/test/integration/helper_container": { - "name": "clickhouse/integration-helper", + "name": "altinityinfra/integration-helper", "dependent": [] }, "docker/test/integration/mysql_golang_client": { - "name": "clickhouse/mysql-golang-client", + "name": "altinityinfra/mysql-golang-client", "dependent": [] }, "docker/test/integration/dotnet_client": { - "name": "clickhouse/dotnet-client", + "name": "altinityinfra/dotnet-client", "dependent": [] }, "docker/test/integration/mysql_java_client": { - "name": "clickhouse/mysql-java-client", + "name": 
"altinityinfra/mysql-java-client", "dependent": [] }, "docker/test/integration/mysql_js_client": { - "name": "clickhouse/mysql-js-client", + "name": "altinityinfra/mysql-js-client", "dependent": [] }, "docker/test/integration/mysql_php_client": { - "name": "clickhouse/mysql-php-client", + "name": "altinityinfra/mysql-php-client", "dependent": [] }, "docker/test/integration/postgresql_java_client": { - "name": "clickhouse/postgresql-java-client", + "name": "altinityinfra/postgresql-java-client", "dependent": [] }, "docker/test/integration/kerberos_kdc": { "only_amd64": true, - "name": "clickhouse/kerberos-kdc", + "name": "altinityinfra/kerberos-kdc", "dependent": [] }, "docker/test/base": { - "name": "clickhouse/test-base", - "dependent": [ - "docker/test/fuzzer", - "docker/test/integration/base", - "docker/test/keeper-jepsen", - "docker/test/server-jepsen", - "docker/test/sqllogic", - "docker/test/sqltest", - "docker/test/stateless" - ] + "name": "altinityinfra/test-base", + "dependent": [ + "docker/test/stateless", + "docker/test/integration/base" + ] }, "docker/test/integration/kerberized_hadoop": { "only_amd64": true, - "name": "clickhouse/kerberized-hadoop", - "dependent": [] - }, - "docker/test/sqlancer": { - "name": "clickhouse/sqlancer-test", - "dependent": [] - }, - "docker/test/keeper-jepsen": { - "name": "clickhouse/keeper-jepsen-test", + "name": "altinityinfra/kerberized-hadoop", "dependent": [] }, "docker/test/server-jepsen": { - "name": "clickhouse/server-jepsen-test", + "name": "altinityinfra/server-jepsen-test", "dependent": [] }, "docker/test/install/deb": { - "name": "clickhouse/install-deb-test", + "name": "altinityinfra/install-deb-test", "dependent": [] }, "docker/test/install/rpm": { - "name": "clickhouse/install-rpm-test", - "dependent": [] - }, - "docker/docs/builder": { - "name": "clickhouse/docs-builder", + "name": "altinityinfra/install-rpm-test", "dependent": [] }, "docker/test/sqllogic": { - "name": "clickhouse/sqllogic-test", + "name": 
"altinityinfra/sqllogic-test", "dependent": [] }, "docker/test/sqltest": { - "name": "clickhouse/sqltest", + "name": "altinityinfra/sqltest", "dependent": [] }, "docker/test/integration/nginx_dav": { - "name": "clickhouse/nginx-dav", + "name": "altinityinfra/nginx-dav", "dependent": [] } } diff --git a/docker/packager/binary/Dockerfile b/docker/packager/binary/Dockerfile index 940daad9c61f..38a578ff9897 100644 --- a/docker/packager/binary/Dockerfile +++ b/docker/packager/binary/Dockerfile @@ -1,7 +1,8 @@ -# docker build -t clickhouse/binary-builder . + +# docker build -t altinityinfra/binary-builder . ARG FROM_TAG=latest -FROM clickhouse/test-util:latest AS cctools -# The cctools are built always from the clickhouse/test-util:latest and cached inline +FROM altinityinfra/test-util:$FROM_TAG as cctools +# The cctools are built always from the altinityinfra/test-util:latest and cached inline # Theoretically, it should improve rebuild speed significantly ENV CC=clang-${LLVM_VERSION} ENV CXX=clang++-${LLVM_VERSION} @@ -39,7 +40,7 @@ RUN git clone --depth 1 https://github.com/tpoechtrager/cctools-port.git \ # END COMPILE # !!!!!!!!!!! -FROM clickhouse/test-util:$FROM_TAG +FROM altinityinfra/test-util:$FROM_TAG ENV CC=clang-${LLVM_VERSION} ENV CXX=clang++-${LLVM_VERSION} diff --git a/docker/packager/binary/build.sh b/docker/packager/binary/build.sh index 1f436d964759..05049bee246f 100755 --- a/docker/packager/binary/build.sh +++ b/docker/packager/binary/build.sh @@ -27,9 +27,13 @@ fi # export CCACHE_LOGFILE=/build/ccache.log # export CCACHE_DEBUG=1 +# TODO(vnemkov): this might not be needed anymore, but let's keep it for the reference. Maybe remove or un-comment on next build attempt? 
+# https://stackoverflow.com/a/71940133 +# git config --global --add safe.directory '*' mkdir -p /build/build_docker cd /build/build_docker + rm -f CMakeCache.txt # Read cmake arguments into array (possibly empty) read -ra CMAKE_FLAGS <<< "${CMAKE_FLAGS:-}" diff --git a/docker/packager/packager b/docker/packager/packager index 6b3a3f2bb245..c8025b64fc54 100755 --- a/docker/packager/packager +++ b/docker/packager/packager @@ -10,7 +10,7 @@ from typing import List, Optional SCRIPT_PATH = Path(__file__).absolute() IMAGE_TYPE = "binary" -IMAGE_NAME = f"clickhouse/{IMAGE_TYPE}-builder" +IMAGE_NAME = f"altinityinfra/{IMAGE_TYPE}-builder" class BuildException(Exception): @@ -100,13 +100,13 @@ def run_docker_image_with_env( else: user = f"{os.geteuid()}:{os.getegid()}" - ccache_mount = f"--volume={ccache_dir}:/ccache" + ccache_mount = f" --volume={ccache_dir}:/ccache" if ccache_dir is None: ccache_mount = "" cmd = ( f"docker run --network=host --user={user} --rm {ccache_mount}" - f"--volume={output_dir}:/output --volume={ch_root}:/build {env_part} " + f" --volume={output_dir}:/output --volume={ch_root}:/build {env_part} " f"--volume={cargo_cache_dir}:/rust/cargo/registry {interactive} {image_name}" ) diff --git a/docker/test/base/Dockerfile b/docker/test/base/Dockerfile index b55baa0e0fc3..212710787d7e 100644 --- a/docker/test/base/Dockerfile +++ b/docker/test/base/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 -# docker build -t clickhouse/test-base . +# docker build -t altinityinfra/test-base . ARG FROM_TAG=latest -FROM clickhouse/test-util:$FROM_TAG +FROM altinityinfra/test-util:$FROM_TAG RUN apt-get update \ && apt-get install \ diff --git a/docker/test/codebrowser/Dockerfile b/docker/test/codebrowser/Dockerfile index 8136fd1fbbcd..f9761315be80 100644 --- a/docker/test/codebrowser/Dockerfile +++ b/docker/test/codebrowser/Dockerfile @@ -2,7 +2,7 @@ # docker build --network=host -t clickhouse/codebrowser . 
# docker run --volume=path_to_repo:/repo_folder --volume=path_to_result:/test_output clickhouse/codebrowser ARG FROM_TAG=latest -FROM clickhouse/binary-builder:$FROM_TAG +FROM altinityinfra/binary-builder:$FROM_TAG # ARG for quick switch to a given ubuntu mirror ARG apt_archive="http://archive.ubuntu.com" diff --git a/docker/test/fasttest/Dockerfile b/docker/test/fasttest/Dockerfile index ad24e662a6c9..e6d7fea17179 100644 --- a/docker/test/fasttest/Dockerfile +++ b/docker/test/fasttest/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/fasttest . ARG FROM_TAG=latest -FROM clickhouse/test-util:$FROM_TAG +FROM altinityinfra/test-util:$FROM_TAG RUN apt-get update \ && apt-get install \ diff --git a/docker/test/fuzzer/Dockerfile b/docker/test/fuzzer/Dockerfile index 0bc0fb06633b..745db8769bb4 100644 --- a/docker/test/fuzzer/Dockerfile +++ b/docker/test/fuzzer/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/fuzzer . ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG # ARG for quick switch to a given ubuntu mirror ARG apt_archive="http://archive.ubuntu.com" diff --git a/docker/test/integration/base/Dockerfile b/docker/test/integration/base/Dockerfile index 270b40e23a6d..caf0f38bec4e 100644 --- a/docker/test/integration/base/Dockerfile +++ b/docker/test/integration/base/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/integration-test . 
ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG SHELL ["/bin/bash", "-c"] diff --git a/docker/test/integration/runner/compose/docker_compose_clickhouse.yml b/docker/test/integration/runner/compose/docker_compose_clickhouse.yml index fdd124ede91a..ff4523c5b0d7 100644 --- a/docker/test/integration/runner/compose/docker_compose_clickhouse.yml +++ b/docker/test/integration/runner/compose/docker_compose_clickhouse.yml @@ -2,4 +2,4 @@ version: '2.3' # Used to pre-pull images with docker-compose services: clickhouse1: - image: clickhouse/integration-test + image: altinityinfra/integration-test diff --git a/docker/test/integration/runner/compose/docker_compose_dotnet_client.yml b/docker/test/integration/runner/compose/docker_compose_dotnet_client.yml index b63dac51522c..e5746fa209fb 100644 --- a/docker/test/integration/runner/compose/docker_compose_dotnet_client.yml +++ b/docker/test/integration/runner/compose/docker_compose_dotnet_client.yml @@ -1,6 +1,6 @@ version: '2.3' services: dotnet1: - image: clickhouse/dotnet-client:${DOCKER_DOTNET_CLIENT_TAG:-latest} + image: altinityinfra/dotnet-client:${DOCKER_DOTNET_CLIENT_TAG:-latest} # to keep container running command: sleep infinity diff --git a/docker/test/integration/runner/compose/docker_compose_jdbc_bridge.yml b/docker/test/integration/runner/compose/docker_compose_jdbc_bridge.yml index b3686adc21c4..807916860a15 100644 --- a/docker/test/integration/runner/compose/docker_compose_jdbc_bridge.yml +++ b/docker/test/integration/runner/compose/docker_compose_jdbc_bridge.yml @@ -1,7 +1,8 @@ version: '2.3' services: bridge1: - image: clickhouse/jdbc-bridge + # NOTE(vnemkov): not produced by CI/CD, so must not be replaced with altinityinfra/jdbc-bridge + image: altinityinfra/jdbc-bridge command: | /bin/bash -c 'cat << EOF > config/datasources/self.json { diff --git a/docker/test/integration/runner/compose/docker_compose_keeper.yml 
b/docker/test/integration/runner/compose/docker_compose_keeper.yml index 91010c4aa83d..fba5bc728f88 100644 --- a/docker/test/integration/runner/compose/docker_compose_keeper.yml +++ b/docker/test/integration/runner/compose/docker_compose_keeper.yml @@ -1,7 +1,7 @@ version: '2.3' services: zoo1: - image: ${image:-clickhouse/integration-test} + image: ${image:-altinityinfra/integration-test} restart: always user: ${user:-} volumes: @@ -37,7 +37,7 @@ services: - inet6 - rotate zoo2: - image: ${image:-clickhouse/integration-test} + image: ${image:-altinityinfra/integration-test} restart: always user: ${user:-} volumes: @@ -73,7 +73,7 @@ services: - inet6 - rotate zoo3: - image: ${image:-clickhouse/integration-test} + image: ${image:-altinityinfra/integration-test} restart: always user: ${user:-} volumes: diff --git a/docker/test/integration/runner/compose/docker_compose_kerberized_hdfs.yml b/docker/test/integration/runner/compose/docker_compose_kerberized_hdfs.yml index e955a14eb3df..58d321177c0d 100644 --- a/docker/test/integration/runner/compose/docker_compose_kerberized_hdfs.yml +++ b/docker/test/integration/runner/compose/docker_compose_kerberized_hdfs.yml @@ -4,7 +4,7 @@ services: kerberizedhdfs1: cap_add: - DAC_READ_SEARCH - image: clickhouse/kerberized-hadoop:${DOCKER_KERBERIZED_HADOOP_TAG:-latest} + image: altinityinfra/kerberized-hadoop:${DOCKER_KERBERIZED_HADOOP_TAG:-latest} hostname: kerberizedhdfs1 restart: always volumes: @@ -24,7 +24,7 @@ services: net.ipv4.ip_local_port_range: '55000 65535' hdfskerberos: - image: clickhouse/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest} + image: altinityinfra/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest} hostname: hdfskerberos volumes: - ${KERBERIZED_HDFS_DIR}/secrets:/tmp/keytab diff --git a/docker/test/integration/runner/compose/docker_compose_kerberized_kafka.yml b/docker/test/integration/runner/compose/docker_compose_kerberized_kafka.yml index 49d4c1db90fe..7ae1011b1876 100644 --- 
a/docker/test/integration/runner/compose/docker_compose_kerberized_kafka.yml +++ b/docker/test/integration/runner/compose/docker_compose_kerberized_kafka.yml @@ -52,7 +52,7 @@ services: net.ipv4.ip_local_port_range: '55000 65535' kafka_kerberos: - image: clickhouse/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest} + image: altinityinfra/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest} hostname: kafka_kerberos volumes: - ${KERBERIZED_KAFKA_DIR}/secrets:/tmp/keytab diff --git a/docker/test/integration/runner/compose/docker_compose_kerberos_kdc.yml b/docker/test/integration/runner/compose/docker_compose_kerberos_kdc.yml index 3ce9a6df1fb6..062bdace6e9c 100644 --- a/docker/test/integration/runner/compose/docker_compose_kerberos_kdc.yml +++ b/docker/test/integration/runner/compose/docker_compose_kerberos_kdc.yml @@ -2,7 +2,7 @@ version: '2.3' services: kerberoskdc: - image: clickhouse/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest} + image: altinityinfra/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest} hostname: kerberoskdc volumes: - ${KERBEROS_KDC_DIR}/secrets:/tmp/keytab diff --git a/docker/test/integration/runner/compose/docker_compose_minio.yml b/docker/test/integration/runner/compose/docker_compose_minio.yml index f2979566296f..9b1748654238 100644 --- a/docker/test/integration/runner/compose/docker_compose_minio.yml +++ b/docker/test/integration/runner/compose/docker_compose_minio.yml @@ -21,14 +21,14 @@ services: # HTTP proxies for Minio. proxy1: - image: clickhouse/s3-proxy + image: altinityinfra/s3-proxy expose: - "8080" # Redirect proxy port - "80" # Reverse proxy port - "443" # Reverse proxy port (secure) proxy2: - image: clickhouse/s3-proxy + image: altinityinfra/s3-proxy expose: - "8080" - "80" @@ -36,7 +36,7 @@ services: # Empty container to run proxy resolver. 
resolver: - image: clickhouse/python-bottle + image: altinityinfra/python-bottle expose: - "8080" tty: true diff --git a/docker/test/integration/runner/compose/docker_compose_mysql_golang_client.yml b/docker/test/integration/runner/compose/docker_compose_mysql_golang_client.yml index 56cc04105740..09154b584244 100644 --- a/docker/test/integration/runner/compose/docker_compose_mysql_golang_client.yml +++ b/docker/test/integration/runner/compose/docker_compose_mysql_golang_client.yml @@ -1,6 +1,6 @@ version: '2.3' services: golang1: - image: clickhouse/mysql-golang-client:${DOCKER_MYSQL_GOLANG_CLIENT_TAG:-latest} + image: altinityinfra/mysql-golang-client:${DOCKER_MYSQL_GOLANG_CLIENT_TAG:-latest} # to keep container running command: sleep infinity diff --git a/docker/test/integration/runner/compose/docker_compose_mysql_java_client.yml b/docker/test/integration/runner/compose/docker_compose_mysql_java_client.yml index eb5ffb01baa2..a84cef915df2 100644 --- a/docker/test/integration/runner/compose/docker_compose_mysql_java_client.yml +++ b/docker/test/integration/runner/compose/docker_compose_mysql_java_client.yml @@ -1,6 +1,6 @@ version: '2.3' services: java1: - image: clickhouse/mysql-java-client:${DOCKER_MYSQL_JAVA_CLIENT_TAG:-latest} + image: altinityinfra/mysql-java-client:${DOCKER_MYSQL_JAVA_CLIENT_TAG:-latest} # to keep container running command: sleep infinity diff --git a/docker/test/integration/runner/compose/docker_compose_mysql_js_client.yml b/docker/test/integration/runner/compose/docker_compose_mysql_js_client.yml index 90939449c5f3..b46eb2706c47 100644 --- a/docker/test/integration/runner/compose/docker_compose_mysql_js_client.yml +++ b/docker/test/integration/runner/compose/docker_compose_mysql_js_client.yml @@ -1,6 +1,6 @@ version: '2.3' services: mysqljs1: - image: clickhouse/mysql-js-client:${DOCKER_MYSQL_JS_CLIENT_TAG:-latest} + image: altinityinfra/mysql-js-client:${DOCKER_MYSQL_JS_CLIENT_TAG:-latest} # to keep container running command: sleep 
infinity diff --git a/docker/test/integration/runner/compose/docker_compose_mysql_php_client.yml b/docker/test/integration/runner/compose/docker_compose_mysql_php_client.yml index 408b8ff089a9..662783a00a1f 100644 --- a/docker/test/integration/runner/compose/docker_compose_mysql_php_client.yml +++ b/docker/test/integration/runner/compose/docker_compose_mysql_php_client.yml @@ -1,6 +1,6 @@ version: '2.3' services: php1: - image: clickhouse/mysql-php-client:${DOCKER_MYSQL_PHP_CLIENT_TAG:-latest} + image: altinityinfra/mysql-php-client:${DOCKER_MYSQL_PHP_CLIENT_TAG:-latest} # to keep container running command: sleep infinity diff --git a/docker/test/integration/runner/compose/docker_compose_nginx.yml b/docker/test/integration/runner/compose/docker_compose_nginx.yml index 38d2a6d84c84..9d4403f283fb 100644 --- a/docker/test/integration/runner/compose/docker_compose_nginx.yml +++ b/docker/test/integration/runner/compose/docker_compose_nginx.yml @@ -5,7 +5,7 @@ services: # Files will be put into /usr/share/nginx/files. 
nginx: - image: clickhouse/nginx-dav:${DOCKER_NGINX_DAV_TAG:-latest} + image: altinityinfra/nginx-dav:${DOCKER_NGINX_DAV_TAG:-latest} restart: always ports: - 80:80 diff --git a/docker/test/integration/runner/compose/docker_compose_postgresql_java_client.yml b/docker/test/integration/runner/compose/docker_compose_postgresql_java_client.yml index 904bfffdfd5b..5c8673ae3eeb 100644 --- a/docker/test/integration/runner/compose/docker_compose_postgresql_java_client.yml +++ b/docker/test/integration/runner/compose/docker_compose_postgresql_java_client.yml @@ -1,6 +1,6 @@ version: '2.2' services: java: - image: clickhouse/postgresql-java-client:${DOCKER_POSTGRESQL_JAVA_CLIENT_TAG:-latest} + image: altinityinfra/postgresql-java-client:${DOCKER_POSTGRESQL_JAVA_CLIENT_TAG:-latest} # to keep container running command: sleep infinity diff --git a/docker/test/integration/runner/dockerd-entrypoint.sh b/docker/test/integration/runner/dockerd-entrypoint.sh index b05aef76faf8..f4fb84acbe18 100755 --- a/docker/test/integration/runner/dockerd-entrypoint.sh +++ b/docker/test/integration/runner/dockerd-entrypoint.sh @@ -38,9 +38,11 @@ while true; do reties=$((reties+1)) if [[ $reties -ge 100 ]]; then # 10 sec max echo "Can't start docker daemon, timeout exceeded." >&2 + cat /ClickHouse/tests/integration/dockerd.log >&2 exit 1; fi - sleep 0.1 + # For whatever reason docker seems to be unable to start in 10 seconds, so effectively increasing timeout to 30 seconds + sleep 0.3 done set -e diff --git a/docker/test/keeper-jepsen/Dockerfile b/docker/test/keeper-jepsen/Dockerfile index a794e076ec02..b93b07189012 100644 --- a/docker/test/keeper-jepsen/Dockerfile +++ b/docker/test/keeper-jepsen/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/keeper-jepsen-test . 
ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG ENV DEBIAN_FRONTEND=noninteractive ENV CLOJURE_VERSION=1.10.3.814 diff --git a/docker/test/performance-comparison/Dockerfile b/docker/test/performance-comparison/Dockerfile index d31663f90711..9864cfe6649e 100644 --- a/docker/test/performance-comparison/Dockerfile +++ b/docker/test/performance-comparison/Dockerfile @@ -1,7 +1,7 @@ # docker build -t clickhouse/performance-comparison . ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG RUN apt-get update \ && DEBIAN_FRONTEND=noninteractive apt-get install --yes --no-install-recommends \ diff --git a/docker/test/server-jepsen/Dockerfile b/docker/test/server-jepsen/Dockerfile index a212427b2a1a..8625058e2502 100644 --- a/docker/test/server-jepsen/Dockerfile +++ b/docker/test/server-jepsen/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/server-jepsen-test . ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG ENV DEBIAN_FRONTEND=noninteractive ENV CLOJURE_VERSION=1.10.3.814 diff --git a/docker/test/stateful/Dockerfile b/docker/test/stateful/Dockerfile index f513735a2d0a..85e2bee4bba9 100644 --- a/docker/test/stateful/Dockerfile +++ b/docker/test/stateful/Dockerfile @@ -1,7 +1,8 @@ # rebuild in #47031 # docker build -t clickhouse/stateful-test . 
ARG FROM_TAG=latest -FROM clickhouse/stateless-test:$FROM_TAG +# TODO consider replacing clickhouse with altinityinfra dockerhub account +FROM altinityinfra/stateless-test:$FROM_TAG RUN apt-get update -y \ && env DEBIAN_FRONTEND=noninteractive \ @@ -9,6 +10,8 @@ RUN apt-get update -y \ python3-requests \ nodejs \ npm \ + rpm2cpio \ + cpio \ && apt-get clean COPY s3downloader /s3downloader diff --git a/docker/test/stateful/setup_minio.sh b/docker/test/stateful/setup_minio.sh deleted file mode 120000 index 0d539f72cb34..000000000000 --- a/docker/test/stateful/setup_minio.sh +++ /dev/null @@ -1 +0,0 @@ -../stateless/setup_minio.sh \ No newline at end of file diff --git a/docker/test/stateful/setup_minio.sh b/docker/test/stateful/setup_minio.sh new file mode 100755 index 000000000000..c0deb46a9602 --- /dev/null +++ b/docker/test/stateful/setup_minio.sh @@ -0,0 +1,91 @@ +#!/bin/bash + +USAGE='Usage for local run: + +./docker/test/stateless/setup_minio.sh { stateful | stateless } ./tests/ + +' + +set -e -x -a -u + +TEST_TYPE="$1" +shift + +case $TEST_TYPE in + stateless) QUERY_DIR=0_stateless ;; + stateful) QUERY_DIR=1_stateful ;; + *) echo "unknown test type $TEST_TYPE"; echo "${USAGE}"; exit 1 ;; +esac + +ls -lha + +mkdir -p ./minio_data + +if [ ! -f ./minio ]; then + MINIO_SERVER_VERSION=${MINIO_SERVER_VERSION:-2022-01-03T18-22-58Z} + MINIO_CLIENT_VERSION=${MINIO_CLIENT_VERSION:-2022-01-05T23-52-51Z} + case $(uname -m) in + x86_64) BIN_ARCH=amd64 ;; + aarch64) BIN_ARCH=arm64 ;; + *) echo "unknown architecture $(uname -m)"; exit 1 ;; + esac + echo 'MinIO binary not found, downloading...' 
+ + BINARY_TYPE=$(uname -s | tr '[:upper:]' '[:lower:]') + + wget "https://dl.min.io/server/minio/release/${BINARY_TYPE}-${BIN_ARCH}/archive/minio.RELEASE.${MINIO_SERVER_VERSION}" -O ./minio \ + && wget "https://dl.min.io/client/mc/release/${BINARY_TYPE}-${BIN_ARCH}/archive/mc.RELEASE.${MINIO_CLIENT_VERSION}" -O ./mc \ + && chmod +x ./mc ./minio +fi + +MINIO_ROOT_USER=${MINIO_ROOT_USER:-clickhouse} +MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-clickhouse} + +./minio --version + +./minio server --address ":11111" ./minio_data & + +i=0 +while ! curl -v --silent http://localhost:11111 2>&1 | grep AccessDenied +do + if [[ $i == 60 ]]; then + echo "Failed to setup minio" + exit 0 + fi + echo "Trying to connect to minio" + sleep 1 + i=$((i + 1)) +done + +lsof -i :11111 + +sleep 5 + +./mc alias set clickminio http://localhost:11111 clickhouse clickhouse +./mc admin user add clickminio test testtest +./mc admin policy set clickminio readwrite user=test +./mc mb clickminio/test +if [ "$TEST_TYPE" = "stateless" ]; then + ./mc policy set public clickminio/test +fi + + +# Upload data to Minio. 
By default after unpacking all tests will be in +# /usr/share/clickhouse-test/queries + +TEST_PATH=${1:-/usr/share/clickhouse-test} +MINIO_DATA_PATH=${TEST_PATH}/queries/${QUERY_DIR}/data_minio + +# Iterating over globs will cause redundant FILE variable to be a path to a file, not a filename +# shellcheck disable=SC2045 +for FILE in $(ls "${MINIO_DATA_PATH}"); do + echo "$FILE"; + ./mc cp "${MINIO_DATA_PATH}"/"$FILE" clickminio/test/"$FILE"; +done + +mkdir -p ~/.aws +cat <> ~/.aws/credentials +[default] +aws_access_key_id=${MINIO_ROOT_USER} +aws_secret_access_key=${MINIO_ROOT_PASSWORD} +EOT diff --git a/docker/test/stateful/setup_minio.sh~c3e81877ca (Make builds and tests possible in Altinity's infrastructure) b/docker/test/stateful/setup_minio.sh~c3e81877ca (Make builds and tests possible in Altinity's infrastructure) new file mode 100755 index 000000000000..d077dea920c6 --- /dev/null +++ b/docker/test/stateful/setup_minio.sh~c3e81877ca (Make builds and tests possible in Altinity's infrastructure) @@ -0,0 +1,77 @@ +#!/bin/bash + +# TODO: Make this file shared with stateless tests +# +# Usage for local run: +# +# ./docker/test/stateful/setup_minio.sh ./tests/ +# + +set -e -x -a -u + +rpm2cpio ./minio-20220103182258.0.0.x86_64.rpm | cpio -i --make-directories +find -name minio +cp ./usr/local/bin/minio ./ + +ls -lha + +mkdir -p ./minio_data + +if [ ! -f ./minio ]; then + echo 'MinIO binary not found, downloading...' + + BINARY_TYPE=$(uname -s | tr '[:upper:]' '[:lower:]') + + wget "https://dl.min.io/server/minio/release/${BINARY_TYPE}-amd64/minio" \ + && chmod +x ./minio \ + && wget "https://dl.min.io/client/mc/release/${BINARY_TYPE}-amd64/mc" \ + && chmod +x ./mc +fi + +MINIO_ROOT_USER=${MINIO_ROOT_USER:-clickhouse} +MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-clickhouse} + +./minio --version +./minio server --address ":11111" ./minio_data & + +i=0 +while ! 
curl -v --silent http://localhost:11111 2>&1 | grep AccessDenied +do + if [[ $i == 60 ]]; then + echo "Failed to setup minio" + exit 0 + fi + echo "Trying to connect to minio" + sleep 1 + i=$((i + 1)) +done + +lsof -i :11111 + +sleep 5 + +./mc alias set clickminio http://localhost:11111 clickhouse clickhouse +./mc admin user add clickminio test testtest +./mc admin policy set clickminio readwrite user=test +./mc mb clickminio/test + + +# Upload data to Minio. By default after unpacking all tests will be in +# /usr/share/clickhouse-test/queries + +TEST_PATH=${1:-/usr/share/clickhouse-test} +MINIO_DATA_PATH=${TEST_PATH}/queries/1_stateful/data_minio + +# Iterating over globs will cause redundant FILE variable to be a path to a file, not a filename +# shellcheck disable=SC2045 +for FILE in $(ls "${MINIO_DATA_PATH}"); do + echo "$FILE"; + ./mc cp "${MINIO_DATA_PATH}"/"$FILE" clickminio/test/"$FILE"; +done + +mkdir -p ~/.aws +cat <> ~/.aws/credentials +[default] +aws_access_key_id=clickhouse +aws_secret_access_key=clickhouse +EOT diff --git a/docker/test/stateless/Dockerfile b/docker/test/stateless/Dockerfile index 35a6e9c365b9..2ad0e2d06119 100644 --- a/docker/test/stateless/Dockerfile +++ b/docker/test/stateless/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/stateless-test . 
ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/download/v1.1.4.20200302/clickhouse-odbc-1.1.4-Linux.tar.gz" @@ -43,6 +43,8 @@ RUN apt-get update -y \ pv \ zip \ p7zip-full \ + rpm2cpio \ + cpio \ && apt-get clean RUN pip3 install numpy scipy pandas Jinja2 diff --git a/docker/test/stateless/setup_minio.sh b/docker/test/stateless/setup_minio.sh index c756ce4669da..269c5055ea4f 100755 --- a/docker/test/stateless/setup_minio.sh +++ b/docker/test/stateless/setup_minio.sh @@ -72,6 +72,7 @@ download_minio() { start_minio() { mkdir -p ./minio_data ./minio --version + ./minio server --address ":11111" ./minio_data & wait_for_it lsof -i :11111 diff --git a/docker/test/stateless_pytest/Dockerfile b/docker/test/stateless_pytest/Dockerfile new file mode 100644 index 000000000000..c148b6212417 --- /dev/null +++ b/docker/test/stateless_pytest/Dockerfile @@ -0,0 +1,33 @@ +# rebuild in #33610 +# docker build -t clickhouse/stateless-pytest . 
+ARG FROM_TAG=latest +FROM altinityinfra/test-base:$FROM_TAG + +RUN apt-get update -y && \ + apt-get install -y --no-install-recommends \ + python3-pip \ + python3-setuptools \ + python3-wheel \ + brotli \ + netcat-openbsd \ + postgresql-client \ + zstd + +RUN python3 -m pip install \ + wheel \ + pytest \ + pytest-html \ + pytest-json \ + pytest-randomly \ + pytest-rerunfailures \ + pytest-timeout \ + pytest-xdist \ + pandas \ + numpy \ + scipy + +CMD dpkg -i package_folder/clickhouse-common-static_*.deb; \ + dpkg -i package_folder/clickhouse-common-static-dbg_*.deb; \ + dpkg -i package_folder/clickhouse-server_*.deb; \ + dpkg -i package_folder/clickhouse-client_*.deb; \ + python3 -m pytest /usr/share/clickhouse-test/queries -n $(nproc) --reruns=1 --timeout=600 --json=test_output/report.json --html=test_output/report.html --self-contained-html diff --git a/docker/test/stress/Dockerfile b/docker/test/stress/Dockerfile index eddeb04758ba..69573839f653 100644 --- a/docker/test/stress/Dockerfile +++ b/docker/test/stress/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/stress-test . ARG FROM_TAG=latest -FROM clickhouse/stateful-test:$FROM_TAG +FROM altinityinfra/stateful-test:$FROM_TAG RUN apt-get update -y \ && env DEBIAN_FRONTEND=noninteractive \ diff --git a/docker/test/unit/Dockerfile b/docker/test/unit/Dockerfile index b75bfb6661cc..378341ab8b69 100644 --- a/docker/test/unit/Dockerfile +++ b/docker/test/unit/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/unit-test . ARG FROM_TAG=latest -FROM clickhouse/stateless-test:$FROM_TAG +FROM altinityinfra/stateless-test:$FROM_TAG RUN apt-get install gdb diff --git a/docker/test/upgrade/Dockerfile b/docker/test/upgrade/Dockerfile index 9152230af1cf..87fff020aecc 100644 --- a/docker/test/upgrade/Dockerfile +++ b/docker/test/upgrade/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/upgrade-check . 
ARG FROM_TAG=latest -FROM clickhouse/stateful-test:$FROM_TAG +FROM altinityinfra/stateful-test:$FROM_TAG RUN apt-get update -y \ && env DEBIAN_FRONTEND=noninteractive \ diff --git a/packages/clickhouse-client.yaml b/packages/clickhouse-client.yaml index 4d707b28ad90..059562835d8c 100644 --- a/packages/clickhouse-client.yaml +++ b/packages/clickhouse-client.yaml @@ -11,8 +11,8 @@ description: | arch: "${DEB_ARCH}" # amd64, arm64 platform: "linux" version: "${CLICKHOUSE_VERSION_STRING}" -vendor: "ClickHouse Inc." -homepage: "https://clickhouse.com" +vendor: "Altinity Inc." +homepage: "https://altinity.com/" license: "Apache" section: "database" priority: "optional" diff --git a/packages/clickhouse-common-static-dbg.yaml b/packages/clickhouse-common-static-dbg.yaml index 96de4c17d88f..63b95b034944 100644 --- a/packages/clickhouse-common-static-dbg.yaml +++ b/packages/clickhouse-common-static-dbg.yaml @@ -11,8 +11,8 @@ description: | arch: "${DEB_ARCH}" # amd64, arm64 platform: "linux" version: "${CLICKHOUSE_VERSION_STRING}" -vendor: "ClickHouse Inc." -homepage: "https://clickhouse.com" +vendor: "Altinity Inc." +homepage: "https://altinity.com/" license: "Apache" section: "database" priority: "optional" diff --git a/packages/clickhouse-common-static.yaml b/packages/clickhouse-common-static.yaml index 95532726d945..96dd2d890a19 100644 --- a/packages/clickhouse-common-static.yaml +++ b/packages/clickhouse-common-static.yaml @@ -11,8 +11,8 @@ description: | arch: "${DEB_ARCH}" # amd64, arm64 platform: "linux" version: "${CLICKHOUSE_VERSION_STRING}" -vendor: "ClickHouse Inc." -homepage: "https://clickhouse.com" +vendor: "Altinity Inc." 
+homepage: "https://altinity.com/" license: "Apache" section: "database" priority: "optional" @@ -34,8 +34,9 @@ suggests: contents: - src: root/usr/bin/clickhouse dst: /usr/bin/clickhouse -- src: root/usr/bin/clickhouse-diagnostics - dst: /usr/bin/clickhouse-diagnostics +# Excluded due to CVEs in go runtime that popup constantly +# - src: root/usr/bin/clickhouse-diagnostics +# dst: /usr/bin/clickhouse-diagnostics - src: root/usr/bin/clickhouse-extract-from-config dst: /usr/bin/clickhouse-extract-from-config - src: root/usr/bin/clickhouse-library-bridge diff --git a/packages/clickhouse-keeper-dbg.yaml b/packages/clickhouse-keeper-dbg.yaml index 28d53b39518d..c1c8a178ba74 100644 --- a/packages/clickhouse-keeper-dbg.yaml +++ b/packages/clickhouse-keeper-dbg.yaml @@ -11,8 +11,8 @@ description: | arch: "${DEB_ARCH}" # amd64, arm64 platform: "linux" version: "${CLICKHOUSE_VERSION_STRING}" -vendor: "ClickHouse Inc." -homepage: "https://clickhouse.com" +vendor: "Altinity Inc." +homepage: "https://altinity.com/" license: "Apache" section: "database" priority: "optional" diff --git a/packages/clickhouse-keeper.yaml b/packages/clickhouse-keeper.yaml index 9dad5382c082..f9780cd4ad9c 100644 --- a/packages/clickhouse-keeper.yaml +++ b/packages/clickhouse-keeper.yaml @@ -11,8 +11,8 @@ description: | arch: "${DEB_ARCH}" # amd64, arm64 platform: "linux" version: "${CLICKHOUSE_VERSION_STRING}" -vendor: "ClickHouse Inc." -homepage: "https://clickhouse.com" +vendor: "Altinity Inc." +homepage: "https://altinity.com/" license: "Apache" section: "database" priority: "optional" diff --git a/packages/clickhouse-server.yaml b/packages/clickhouse-server.yaml index 5e2bc7c74125..9a004c3eb1c6 100644 --- a/packages/clickhouse-server.yaml +++ b/packages/clickhouse-server.yaml @@ -11,8 +11,8 @@ description: | arch: "${DEB_ARCH}" # amd64, arm64 platform: "linux" version: "${CLICKHOUSE_VERSION_STRING}" -vendor: "ClickHouse Inc." -homepage: "https://clickhouse.com" +vendor: "Altinity Inc." 
+homepage: "https://altinity.com/" license: "Apache" section: "database" priority: "optional" diff --git a/programs/diagnostics/internal/collectors/system/system_test.go b/programs/diagnostics/internal/collectors/system/system_test.go index fb1e16bd1ed3..70e79bfc905f 100644 --- a/programs/diagnostics/internal/collectors/system/system_test.go +++ b/programs/diagnostics/internal/collectors/system/system_test.go @@ -55,21 +55,21 @@ func TestSystemCollect(t *testing.T) { memoryUsageFrames, err := countFrameRows(diagSet, "memory_usage") require.Greater(t, memoryUsageFrames, 0) require.Nil(t, err) - // cpu - require.Equal(t, []string{"processor", "vendor", "model", "core", "numThreads", "logical", "capabilities"}, diagSet.Frames["cpu"].Columns()) - cpuFrames, err := countFrameRows(diagSet, "cpu") - require.Greater(t, cpuFrames, 0) - require.Nil(t, err) - // processes - require.Equal(t, []string{"pid", "ppid", "stime", "time", "rss", "size", "faults", "minorFaults", "majorFaults", "user", "state", "priority", "nice", "command"}, diagSet.Frames["processes"].Columns()) - processesFrames, err := countFrameRows(diagSet, "processes") - require.Greater(t, processesFrames, 0) - require.Nil(t, err) - // os - require.Equal(t, []string{"hostname", "os", "goOs", "cpus", "core", "kernel", "platform"}, diagSet.Frames["os"].Columns()) - osFrames, err := countFrameRows(diagSet, "os") - require.Greater(t, osFrames, 0) - require.Nil(t, err) + // // cpu + // require.Equal(t, []string{"processor", "vendor", "model", "core", "numThreads", "logical", "capabilities"}, diagSet.Frames["cpu"].Columns()) + // cpuFrames, err := countFrameRows(diagSet, "cpu") + // require.Greater(t, cpuFrames, 0) + // require.Nil(t, err) + // // processes + // require.Equal(t, []string{"pid", "ppid", "stime", "time", "rss", "size", "faults", "minorFaults", "majorFaults", "user", "state", "priority", "nice", "command"}, diagSet.Frames["processes"].Columns()) + // processesFrames, err := countFrameRows(diagSet, 
"processes") + // require.Greater(t, processesFrames, 0) + // require.Nil(t, err) + // // os + // require.Equal(t, []string{"hostname", "os", "goOs", "cpus", "core", "kernel", "platform"}, diagSet.Frames["os"].Columns()) + // osFrames, err := countFrameRows(diagSet, "os") + // require.Greater(t, osFrames, 0) + // require.Nil(t, err) }) } diff --git a/programs/diagnostics/internal/platform/database/native_test.go b/programs/diagnostics/internal/platform/database/native_test.go index 7028a4b4800b..8f47824fc49c 100644 --- a/programs/diagnostics/internal/platform/database/native_test.go +++ b/programs/diagnostics/internal/platform/database/native_test.go @@ -28,7 +28,7 @@ func createClickHouseContainer(t *testing.T, ctx context.Context) (testcontainer // for now, we test against a hardcoded database-server version but we should make this a property req := testcontainers.ContainerRequest{ - Image: fmt.Sprintf("clickhouse/clickhouse-server:%s", test.GetClickHouseTestVersion()), + Image: fmt.Sprintf("altinityinfra/clickhouse-server:%s", test.GetClickHouseTestVersion()), ExposedPorts: []string{"9000/tcp"}, WaitingFor: wait.ForLog("Ready for connections"), Mounts: testcontainers.ContainerMounts{ @@ -107,7 +107,7 @@ func TestReadTable(t *testing.T) { require.Nil(t, err) require.True(t, ok) require.Equal(t, "default", values[0]) - require.Equal(t, "/var/lib/clickhouse/", values[1]) + require.Equal(t, "/var/lib/altinityinfra/", values[1]) require.Greater(t, values[2], uint64(0)) require.Greater(t, values[3], uint64(0)) require.Greater(t, values[4], uint64(0)) @@ -134,10 +134,10 @@ func TestReadTable(t *testing.T) { frame, err := clickhouseClient.ReadTable("system", "databases", []string{}, data.OrderBy{}, 10) require.Nil(t, err) require.ElementsMatch(t, frame.Columns(), [6]string{"name", "engine", "data_path", "metadata_path", "uuid", "comment"}) - expectedRows := [4][3]string{{"INFORMATION_SCHEMA", "Memory", "/var/lib/clickhouse/"}, - {"default", "Atomic", 
"/var/lib/clickhouse/store/"}, - {"information_schema", "Memory", "/var/lib/clickhouse/"}, - {"system", "Atomic", "/var/lib/clickhouse/store/"}} + expectedRows := [4][3]string{{"INFORMATION_SCHEMA", "Memory", "/var/lib/altinityinfra/"}, + {"default", "Atomic", "/var/lib/altinityinfra/store/"}, + {"information_schema", "Memory", "/var/lib/altinityinfra/"}, + {"system", "Atomic", "/var/lib/altinityinfra/store/"}} i := 0 for { values, ok, err := frame.Next() @@ -181,7 +181,7 @@ func TestReadTable(t *testing.T) { frame, err := clickhouseClient.ReadTable("system", "databases", []string{}, data.OrderBy{}, 1) require.Nil(t, err) require.ElementsMatch(t, frame.Columns(), [6]string{"name", "engine", "data_path", "metadata_path", "uuid", "comment"}) - expectedRows := [1][3]string{{"INFORMATION_SCHEMA", "Memory", "/var/lib/clickhouse/"}} + expectedRows := [1][3]string{{"INFORMATION_SCHEMA", "Memory", "/var/lib/altinityinfra/"}} i := 0 for { values, ok, err := frame.Next() @@ -216,10 +216,10 @@ func TestReadTable(t *testing.T) { require.Nil(t, err) require.ElementsMatch(t, frame.Columns(), [6]string{"name", "engine", "data_path", "metadata_path", "uuid", "comment"}) expectedRows := [4][3]string{ - {"default", "Atomic", "/var/lib/clickhouse/store/"}, - {"system", "Atomic", "/var/lib/clickhouse/store/"}, - {"INFORMATION_SCHEMA", "Memory", "/var/lib/clickhouse/"}, - {"information_schema", "Memory", "/var/lib/clickhouse/"}, + {"default", "Atomic", "/var/lib/altinityinfra/store/"}, + {"system", "Atomic", "/var/lib/altinityinfra/store/"}, + {"INFORMATION_SCHEMA", "Memory", "/var/lib/altinityinfra/"}, + {"information_schema", "Memory", "/var/lib/altinityinfra/"}, } i := 0 for { @@ -256,7 +256,7 @@ func TestExecuteStatement(t *testing.T) { require.Nil(t, err) require.ElementsMatch(t, frame.Columns(), [2]string{"path", "count"}) expectedRows := [1][2]interface{}{ - {"/var/lib/clickhouse/", uint64(1)}, + {"/var/lib/altinityinfra/", uint64(1)}, } i := 0 for { diff --git 
a/programs/diagnostics/internal/platform/manager_test.go b/programs/diagnostics/internal/platform/manager_test.go index e6c50c6e505a..980b461626a7 100644 --- a/programs/diagnostics/internal/platform/manager_test.go +++ b/programs/diagnostics/internal/platform/manager_test.go @@ -26,7 +26,7 @@ func createClickHouseContainer(t *testing.T, ctx context.Context) (testcontainer } // for now, we test against a hardcoded database-server version but we should make this a property req := testcontainers.ContainerRequest{ - Image: fmt.Sprintf("clickhouse/clickhouse-server:%s", test.GetClickHouseTestVersion()), + Image: fmt.Sprintf("altinityinfra/clickhouse-server:%s", test.GetClickHouseTestVersion()), ExposedPorts: []string{"9000/tcp"}, WaitingFor: wait.ForLog("Ready for connections"), Mounts: testcontainers.ContainerMounts{ diff --git a/programs/diagnostics/internal/platform/utils/process_test.go b/programs/diagnostics/internal/platform/utils/process_test.go index 9baaa5597522..41118576bd84 100644 --- a/programs/diagnostics/internal/platform/utils/process_test.go +++ b/programs/diagnostics/internal/platform/utils/process_test.go @@ -50,7 +50,7 @@ func TestFindClickHouseProcessesAndConfigs(t *testing.T) { // run a ClickHouse container that guarantees that it runs only for the duration of the test req := testcontainers.ContainerRequest{ - Image: fmt.Sprintf("clickhouse/clickhouse-server:%s", test.GetClickHouseTestVersion()), + Image: fmt.Sprintf("altinityinfra/clickhouse-server:%s", test.GetClickHouseTestVersion()), ExposedPorts: []string{"9000/tcp"}, WaitingFor: wait.ForLog("Ready for connections"), Mounts: testcontainers.ContainerMounts{ diff --git a/programs/diagnostics/internal/runner_test.go b/programs/diagnostics/internal/runner_test.go index 2369f8b3007d..17fa2f818401 100644 --- a/programs/diagnostics/internal/runner_test.go +++ b/programs/diagnostics/internal/runner_test.go @@ -36,7 +36,7 @@ func TestCapture(t *testing.T) { } // for now, we test against a hardcoded 
database-server version but we should make this a property req := testcontainers.ContainerRequest{ - Image: fmt.Sprintf("clickhouse/clickhouse-server:%s", test.GetClickHouseTestVersion()), + Image: fmt.Sprintf("altinityinfra/clickhouse-server:%s", test.GetClickHouseTestVersion()), ExposedPorts: []string{"9000/tcp"}, WaitingFor: wait.ForLog("Ready for connections"), Mounts: testcontainers.ContainerMounts{ diff --git a/tests/ci/ast_fuzzer_check.py b/tests/ci/ast_fuzzer_check.py index 620462991efc..918526f051e4 100644 --- a/tests/ci/ast_fuzzer_check.py +++ b/tests/ci/ast_fuzzer_check.py @@ -32,7 +32,7 @@ from tee_popen import TeePopen from upload_result_helper import upload_results -IMAGE_NAME = "clickhouse/fuzzer" +IMAGE_NAME = "altinityinfra/fuzzer" def get_run_command( @@ -204,7 +204,7 @@ def main(): check_name, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) logging.info("Result: '%s', '%s', '%s'", status, description, report_url) print(f"::notice ::Report url: {report_url}") diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py index 528bd41433d2..5007ac815a97 100644 --- a/tests/ci/build_check.py +++ b/tests/ci/build_check.py @@ -6,17 +6,20 @@ import logging import sys import time +from shutil import rmtree from ci_config import CI_CONFIG, BuildConfig from ccache_utils import CargoCache from docker_pull_helper import get_image_with_version from env_helper import ( + CACHES_PATH, GITHUB_JOB, IMAGES_PATH, REPO_COPY, S3_BUILDS_BUCKET, S3_DOWNLOAD, TEMP_PATH, + CLICKHOUSE_STABLE_VERSION_SUFFIX, ) from git_helper import Git, git_runner from pr_info import PRInfo @@ -25,6 +28,7 @@ from tee_popen import TeePopen from version_helper import ( ClickHouseVersion, + Git, get_version_from_repo, update_version_local, ) @@ -35,8 +39,10 @@ get_instance_type, ) from stopwatch import Stopwatch +from ccache_utils import get_ccache_if_not_exists, 
upload_ccache -IMAGE_NAME = "clickhouse/binary-builder" + +IMAGE_NAME = "altinityinfra/binary-builder" BUILD_LOG_NAME = "build_log.log" @@ -57,6 +63,7 @@ def get_packager_cmd( cargo_cache_dir: Path, build_version: str, image_version: str, + ccache_path: str, official: bool, ) -> str: package_type = build_config.package_type @@ -74,7 +81,9 @@ def get_packager_cmd( if build_config.tidy: cmd += " --clang-tidy" - cmd += " --cache=sccache" + # NOTE(vnemkov): we are going to continue to use ccache for now + cmd += " --cache=ccache" + cmd += f" --ccache-dir={ccache_path}" cmd += " --s3-rw-access" cmd += f" --s3-bucket={S3_BUILDS_BUCKET}" cmd += f" --cargo-cache-dir={cargo_cache_dir}" @@ -248,16 +257,20 @@ def main(): logging.info("Got version from repo %s", version.string) - official_flag = pr_info.number == 0 - version_type = "testing" - if "release" in pr_info.labels or "release-lts" in pr_info.labels: - version_type = "stable" - official_flag = True + official_flag = True + version._flavour = version_type = CLICKHOUSE_STABLE_VERSION_SUFFIX + # TODO (vnemkov): right now we'll use simplified version management: + # only update git hash and explicitly set stable version suffix. 
+ # official_flag = pr_info.number == 0 + # version_type = "testing" + # if "release" in pr_info.labels or "release-lts" in pr_info.labels: + # version_type = CLICKHOUSE_STABLE_VERSION_SUFFIX + # official_flag = True update_version_local(version, version_type) - logging.info("Updated local files with version") + logging.info(f"Updated local files with version : {version.string} / {version.describe}") logging.info("Build short name %s", build_name) @@ -268,6 +281,24 @@ def main(): ) cargo_cache.download() + # NOTE(vnemkov): since we still want to use CCACHE over SCCACHE, unlike upstream, + # we need to create local directory for that, just as with 22.8 + ccache_path = os.path.join(CACHES_PATH, build_name + "_ccache") + + logging.info("Will try to fetch cache for our build") + try: + get_ccache_if_not_exists( + ccache_path, s3_helper, pr_info.number, TEMP_PATH, pr_info.release_pr + ) + except Exception as e: + # In case there are issues with ccache, remove the path and do not fail a build + logging.info("Failed to get ccache, building without it. 
Error: %s", e) + rmtree(ccache_path, ignore_errors=True) + + if not os.path.exists(ccache_path): + logging.info("cache was not fetched, will create empty dir") + os.makedirs(ccache_path) + packager_cmd = get_packager_cmd( build_config, repo_path / "docker" / "packager", @@ -275,6 +306,7 @@ def main(): cargo_cache.directory, version.string, image_version, + ccache_path, official_flag, ) @@ -291,6 +323,7 @@ def main(): subprocess.check_call( f"sudo chown -R ubuntu:ubuntu {build_output_path}", shell=True ) + subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {ccache_path}", shell=True) logging.info("Build finished as %s, log path %s", build_status, log_path) if build_status == SUCCESS: cargo_cache.upload() @@ -304,6 +337,10 @@ def main(): ) sys.exit(1) + # Upload the ccache first to have the least build time in case of problems + logging.info("Will upload cache") + upload_ccache(ccache_path, s3_helper, pr_info.number, TEMP_PATH) + # FIXME performance performance_urls = [] performance_path = build_output_path / "performance.tar.zst" @@ -335,11 +372,27 @@ def main(): log_path, s3_path_prefix + "/" + log_path.name ) logging.info("Log url %s", log_url) + print(f"::notice ::Log URL: {log_url}") else: logging.info("Build log doesn't exist") + print("Build log doesn't exist") print(f"::notice ::Log URL: {log_url}") + # TODO(vnemkov): make use of Path instead of string concatenation + src_path = os.path.join(TEMP_PATH, "build_source.src.tar.gz") + s3_path = s3_path_prefix + "/clickhouse-" + version.string + ".src.tar.gz" + logging.info("s3_path %s", s3_path) + if os.path.exists(src_path): + src_url = s3_helper.upload_build_file_to_s3( + src_path, s3_path + ) + logging.info("Source tar %s", src_url) + print(f"::notice ::Source tar URL: {src_url}") + else: + logging.info("Source tar doesn't exist") + print("Source tar doesn't exist") + build_result = BuildResult( build_name, log_url, diff --git a/tests/ci/ccache_utils.py b/tests/ci/ccache_utils.py index 
75a026d2524d..0a04b6f9e3a5 100644 --- a/tests/ci/ccache_utils.py +++ b/tests/ci/ccache_utils.py @@ -4,13 +4,13 @@ import os import shutil from hashlib import md5 +from env_helper import S3_BUILDS_BUCKET, S3_DOWNLOAD from pathlib import Path import requests # type: ignore from build_download_helper import download_build_with_progress, DownloadException from compress_files import decompress_fast, compress_fast -from env_helper import S3_DOWNLOAD, S3_BUILDS_BUCKET from git_helper import git_runner from s3_helper import S3Helper diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index a2f3a3ab4010..c01eb38cc8d5 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -191,6 +191,7 @@ def validate(self) -> None: "package_ubsan", "package_tsan", "package_msan", + "package_tsan", "package_debug", "binary_release", ], @@ -277,11 +278,11 @@ def validate(self) -> None: "SQLancer (debug)": TestConfig("package_debug"), "Sqllogic test (release)": TestConfig("package_release"), "SQLTest": TestConfig("package_release"), + "Sign release (actions)": TestConfig("package_release"), }, ) CI_CONFIG.validate() - # checks required by Mergeable Check REQUIRED_CHECKS = [ "ClickHouse build check", diff --git a/tests/ci/clickhouse_helper.py b/tests/ci/clickhouse_helper.py index dac733805394..1e1c50c892be 100644 --- a/tests/ci/clickhouse_helper.py +++ b/tests/ci/clickhouse_helper.py @@ -194,7 +194,7 @@ def prepare_tests_results_for_clickhouse( report_url: str, check_name: str, ) -> List[dict]: - pull_request_url = "https://github.com/ClickHouse/ClickHouse/commits/master" + pull_request_url = "https://github.com/Altinity/ClickHouse/commits/master" base_ref = "master" head_ref = "master" base_repo = pr_info.repo_full_name diff --git a/tests/ci/codebrowser_check.py b/tests/ci/codebrowser_check.py index a3414156bbab..9a4dc0e40be9 100644 --- a/tests/ci/codebrowser_check.py +++ b/tests/ci/codebrowser_check.py @@ -58,7 +58,7 @@ def main(): if not temp_path.exists(): 
os.makedirs(temp_path) - docker_image = get_image_with_version(IMAGES_PATH, "clickhouse/codebrowser") + docker_image = get_image_with_version(IMAGES_PATH, "altinityinfra/codebrowser") # FIXME: the codebrowser is broken with clang-16, workaround with clang-15 # See https://github.com/ClickHouse/ClickHouse/issues/50077 docker_image.version = "49701-4dcdcf4c11b5604f1c5d3121c9c6fea3e957b605" diff --git a/tests/ci/compatibility_check.py b/tests/ci/compatibility_check.py index 8f6d4917efe6..4ed1ecfb779d 100644 --- a/tests/ci/compatibility_check.py +++ b/tests/ci/compatibility_check.py @@ -25,8 +25,8 @@ from stopwatch import Stopwatch from upload_result_helper import upload_results -IMAGE_UBUNTU = "clickhouse/test-old-ubuntu" -IMAGE_CENTOS = "clickhouse/test-old-centos" +IMAGE_UBUNTU = "altinityinfra/test-old-ubuntu" +IMAGE_CENTOS = "altinityinfra/test-old-centos" DOWNLOAD_RETRIES_COUNT = 5 @@ -251,7 +251,7 @@ def url_filter(url): args.check_name, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if state == "failure": sys.exit(1) diff --git a/tests/ci/docker_images_check.py b/tests/ci/docker_images_check.py index 274d0d1d1dfb..de33a9525dd0 100644 --- a/tests/ci/docker_images_check.py +++ b/tests/ci/docker_images_check.py @@ -91,22 +91,23 @@ def get_changed_docker_images( str(files_changed), ) - changed_images = [] - - for dockerfile_dir, image_description in images_dict.items(): - for f in files_changed: - if f.startswith(dockerfile_dir): - name = image_description["name"] - only_amd64 = image_description.get("only_amd64", False) - logging.info( - "Found changed file '%s' which affects " - "docker image '%s' with path '%s'", - f, - name, - dockerfile_dir, - ) - changed_images.append(DockerImage(dockerfile_dir, name, only_amd64)) - break + # Rebuild all images + changed_images = [DockerImage(dockerfile_dir, image_description["name"], 
image_description.get("only_amd64", False)) for dockerfile_dir, image_description in images_dict.items()] + + # for dockerfile_dir, image_description in images_dict.items(): + # for f in files_changed: + # if f.startswith(dockerfile_dir): + # name = image_description["name"] + # only_amd64 = image_description.get("only_amd64", False) + # logging.info( + # "Found changed file '%s' which affects " + # "docker image '%s' with path '%s'", + # f, + # name, + # dockerfile_dir, + # ) + # changed_images.append(DockerImage(dockerfile_dir, name, only_amd64)) + # break # The order is important: dependents should go later than bases, so that # they are built with updated base versions. @@ -236,6 +237,19 @@ def build_and_push_one_image( f"--tag {image.repo}:{version_string} " f"{cache_from} " f"--cache-to type=inline,mode=max " + # FIXME: many tests utilize packages without specifying version, hence docker pulls :latest + # this will fail multiple jobs are going to be executed on different machines and + # push different images as latest. + # To fix it we may: + # - require jobs to be executed on same machine images were built (no parallelism) + # - change all the test's code (mostly docker-compose files in integration tests) + # that depend on said images and push version somehow into docker-compose. + # (and that is lots of work and many potential conflicts with upstream) + # - tag and push all images as :latest and then just pray that collisions are infrequent. + # and if even if collision happens, image is not that different and would still properly work. + # (^^^ CURRENT SOLUTION ^^^) But this is just a numbers game, it will blow up at some point. 
+ # - do something crazy + f"--tag {image.repo}:latest " f"{push_arg}" f"--progress plain {image.full_path}" ) @@ -244,6 +258,7 @@ def build_and_push_one_image( retcode = proc.wait() if retcode != 0: + logging.error("Building image {} failed with error: {}\n{}".format(image, retcode, ''.join(list(open(build_log, 'rt'))))) return False, build_log logging.info("Processing of %s successfully finished", image.repo) @@ -383,8 +398,8 @@ def main(): if args.push: subprocess.check_output( # pylint: disable=unexpected-keyword-arg - "docker login --username 'robotclickhouse' --password-stdin", - input=get_parameter_from_ssm("dockerhub_robot_password"), + "docker login --username 'altinityinfra' --password-stdin", + input=get_parameter_from_ssm("dockerhub-password"), encoding="utf-8", shell=True, ) @@ -467,7 +482,7 @@ def main(): NAME, ) ch_helper = ClickHouseHelper() - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if status == "failure": sys.exit(1) diff --git a/tests/ci/docker_manifests_merge.py b/tests/ci/docker_manifests_merge.py index 9b61134fa758..08d642d27949 100644 --- a/tests/ci/docker_manifests_merge.py +++ b/tests/ci/docker_manifests_merge.py @@ -250,8 +250,8 @@ def main(): args = parse_args() if args.push: subprocess.check_output( # pylint: disable=unexpected-keyword-arg - "docker login --username 'robotclickhouse' --password-stdin", - input=get_parameter_from_ssm("dockerhub_robot_password"), + "docker login --username 'altinityinfra' --password-stdin", + input=get_parameter_from_ssm("dockerhub-password"), encoding="utf-8", shell=True, ) @@ -315,7 +315,7 @@ def main(): NAME, ) ch_helper = ClickHouseHelper() - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if __name__ == "__main__": diff --git a/tests/ci/docker_pull_helper.py 
b/tests/ci/docker_pull_helper.py index e1327f505a07..224d3a2a7ae1 100644 --- a/tests/ci/docker_pull_helper.py +++ b/tests/ci/docker_pull_helper.py @@ -5,6 +5,7 @@ import time import subprocess import logging +import traceback from pathlib import Path from typing import List, Optional, Union @@ -52,11 +53,28 @@ def get_images_with_versions( for image_name in required_images: docker_image = DockerImage(image_name, version) if image_name in images: - docker_image.version = images[image_name] + image_version = images[image_name] + # NOTE(vnemkov): For some reason we can get version as list of versions, + # in this case choose one that has commit hash and hence is the longest string. + # E.g. from ['latest-amd64', '0-amd64', '0-473d8f560fc78c6cdaabb960a537ca5ab49f795f-amd64'] + # choose '0-473d8f560fc78c6cdaabb960a537ca5ab49f795f-amd64' since it 100% points to proper commit. + if isinstance(image_version, list): + max_len = 0 + max_len_version = '' + for version_variant in image_version: + if len(version_variant) > max_len: + max_len = len(version_variant) + max_len_version = version_variant + logging.debug(f"selected version {max_len_version} from {image_version}") + image_version = max_len_version + + docker_image.version = image_version + docker_images.append(docker_image) latest_error = Exception("predefined to avoid access before created") if pull: + latest_error = None for docker_image in docker_images: for i in range(10): try: @@ -70,7 +88,8 @@ def get_images_with_versions( except Exception as ex: latest_error = ex time.sleep(i * 3) - logging.info("Got execption pulling docker %s", ex) + logging.info("Got exception pulling docker %s", ex) + latest_error = traceback.format_exc() else: raise Exception( "Cannot pull dockerhub for image docker pull " diff --git a/tests/ci/docker_server.py b/tests/ci/docker_server.py index 55bd2983ea48..c713fbb5f420 100644 --- a/tests/ci/docker_server.py +++ b/tests/ci/docker_server.py @@ -34,7 +34,11 @@ ) TEMP_PATH = 
p.join(RUNNER_TEMP, "docker_images_check") -BUCKETS = {"amd64": "package_release", "arm64": "package_aarch64"} +BUCKETS = { + "amd64": "package_release", + # NOTE(vnemkov): arm64 is temporary not supported + # "arm64": "package_aarch64" +} git = Git(ignore_no_tags=True) @@ -56,7 +60,7 @@ def parse_args() -> argparse.Namespace: "--version", type=version_arg, default=get_version_from_repo(git=git).string, - help="a version to build, automaticaly got from version_helper, accepts either " + help="a version to build, automatically got from version_helper, accepts either " "tag ('refs/tags/' is removed automatically) or a normal 22.2.2.2 format", ) parser.add_argument( @@ -77,7 +81,7 @@ def parse_args() -> argparse.Namespace: parser.add_argument( "--image-repo", type=str, - default="clickhouse/clickhouse-server", + default="altinityinfra/clickhouse-server", help="image name on docker hub", ) parser.add_argument( @@ -337,8 +341,8 @@ def main(): if args.push: subprocess.check_output( # pylint: disable=unexpected-keyword-arg - "docker login --username 'robotclickhouse' --password-stdin", - input=get_parameter_from_ssm("dockerhub_robot_password"), + "docker login --username 'altinityinfra' --password-stdin", + input=get_parameter_from_ssm("dockerhub-password"), encoding="utf-8", shell=True, ) @@ -385,7 +389,7 @@ def main(): NAME, ) ch_helper = ClickHouseHelper() - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if status != "success": sys.exit(1) diff --git a/tests/ci/docker_test.py b/tests/ci/docker_test.py index 8aab50ed0825..def24a07f836 100644 --- a/tests/ci/docker_test.py +++ b/tests/ci/docker_test.py @@ -38,67 +38,67 @@ def test_get_changed_docker_images(self): self.maxDiff = None expected = sorted( [ - di.DockerImage("docker/test/base", "clickhouse/test-base", False), - di.DockerImage("docker/docs/builder", "clickhouse/docs-builder", True), + 
di.DockerImage("docker/test/base", "altinityinfra/test-base", False), + di.DockerImage("docker/docs/builder", "altinityinfra/docs-builder", True), di.DockerImage( "docker/test/sqltest", - "clickhouse/sqltest", + "altinityinfra/sqltest", False, - "clickhouse/test-base", # type: ignore + "altinityinfra/test-base", # type: ignore ), di.DockerImage( "docker/test/stateless", - "clickhouse/stateless-test", + "altinityinfra/stateless-test", False, - "clickhouse/test-base", # type: ignore + "altinityinfra/test-base", # type: ignore ), di.DockerImage( "docker/test/integration/base", - "clickhouse/integration-test", + "altinityinfra/integration-test", False, - "clickhouse/test-base", # type: ignore - ), - di.DockerImage( - "docker/test/fuzzer", - "clickhouse/fuzzer", - False, - "clickhouse/test-base", # type: ignore - ), - di.DockerImage( - "docker/test/keeper-jepsen", - "clickhouse/keeper-jepsen-test", - False, - "clickhouse/test-base", # type: ignore - ), - di.DockerImage( - "docker/docs/check", - "clickhouse/docs-check", - False, - "clickhouse/docs-builder", # type: ignore - ), - di.DockerImage( - "docker/docs/release", - "clickhouse/docs-release", - False, - "clickhouse/docs-builder", # type: ignore + "altinityinfra/test-base", # type: ignore ), + # di.DockerImage( + # "docker/test/fuzzer", + # "altinityinfra/fuzzer", + # False, + # "altinityinfra/test-base", # type: ignore + # ), + # di.DockerImage( + # # "docker/test/keeper-jepsen", + # # "altinityinfra/keeper-jepsen-test", + # False, + # "altinityinfra/test-base", # type: ignore + # ), + # di.DockerImage( + # "docker/docs/check", + # "altinityinfra/docs-check", + # False, + # "altinityinfra/docs-builder", # type: ignore + # ), + # di.DockerImage( + # "docker/docs/release", + # "altinityinfra/docs-release", + # False, + # "altinityinfra/docs-builder", # type: ignore + # ), di.DockerImage( "docker/test/stateful", - "clickhouse/stateful-test", + "altinityinfra/stateful-test", False, - "clickhouse/stateless-test", # type: 
ignore + "altinityinfra/stateless-test", # type: ignore ), di.DockerImage( "docker/test/unit", - "clickhouse/unit-test", + "altinityinfra/unit-test", False, - "clickhouse/stateless-test", # type: ignore + "altinityinfra/stateless-test", # type: ignore ), di.DockerImage( "docker/test/stress", - "clickhouse/stress-test", + "altinityinfra/stress-test", False, - "clickhouse/stateful-test", # type: ignore + "altinityinfra/stateful-test", # type: ignore ), ] ) diff --git a/tests/ci/docs_check.py b/tests/ci/docs_check.py index f7339d59a5eb..2954f924dc54 100644 --- a/tests/ci/docs_check.py +++ b/tests/ci/docs_check.py @@ -82,7 +82,7 @@ def main(): elif args.force: logging.info("Check the docs because of force flag") - docker_image = get_image_with_version(temp_path, "clickhouse/docs-builder") + docker_image = get_image_with_version(temp_path, "altinityinfra/docs-builder") test_output = temp_path / "docs_check_log" test_output.mkdir(parents=True, exist_ok=True) @@ -148,7 +148,7 @@ def main(): NAME, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if status == "failure": sys.exit(1) diff --git a/tests/ci/env_helper.py b/tests/ci/env_helper.py index 04532ea3b962..2438fb7ecd92 100644 --- a/tests/ci/env_helper.py +++ b/tests/ci/env_helper.py @@ -17,7 +17,7 @@ CLOUDFLARE_TOKEN = os.getenv("CLOUDFLARE_TOKEN") GITHUB_EVENT_PATH = os.getenv("GITHUB_EVENT_PATH", "") GITHUB_JOB = os.getenv("GITHUB_JOB", "local") -GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY", "ClickHouse/ClickHouse") +GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY", "Altinity/ClickHouse") GITHUB_RUN_ID = os.getenv("GITHUB_RUN_ID", "0") GITHUB_SERVER_URL = os.getenv("GITHUB_SERVER_URL", "https://github.com") GITHUB_WORKSPACE = os.getenv("GITHUB_WORKSPACE", git_root) @@ -26,9 +26,11 @@ REPORTS_PATH = os.getenv("REPORTS_PATH", p.abspath(p.join(module_dir, "./reports"))) REPO_COPY = 
os.getenv("REPO_COPY", git_root) RUNNER_TEMP = os.getenv("RUNNER_TEMP", p.abspath(p.join(module_dir, "./tmp"))) -S3_BUILDS_BUCKET = os.getenv("S3_BUILDS_BUCKET", "clickhouse-builds") -S3_TEST_REPORTS_BUCKET = os.getenv("S3_TEST_REPORTS_BUCKET", "clickhouse-test-reports") +S3_BUILDS_BUCKET = os.getenv("S3_BUILDS_BUCKET", "altinity-build-artifacts") +S3_TEST_REPORTS_BUCKET = os.getenv("S3_TEST_REPORTS_BUCKET", "altinity-build-artifacts") S3_URL = os.getenv("S3_URL", "https://s3.amazonaws.com") +CLICKHOUSE_STABLE_VERSION_SUFFIX = os.getenv("CLICKHOUSE_STABLE_VERSION_SUFFIX", "stable") + S3_DOWNLOAD = os.getenv("S3_DOWNLOAD", S3_URL) S3_ARTIFACT_DOWNLOAD_TEMPLATE = ( f"{S3_DOWNLOAD}/{S3_BUILDS_BUCKET}/" diff --git a/tests/ci/fast_test_check.py b/tests/ci/fast_test_check.py index 17699cffad1d..bc96105dc932 100644 --- a/tests/ci/fast_test_check.py +++ b/tests/ci/fast_test_check.py @@ -113,7 +113,7 @@ def main(): sys.exit(1) sys.exit(0) - docker_image = get_image_with_version(temp_path, "clickhouse/fasttest") + docker_image = get_image_with_version(temp_path, "altinityinfra/fasttest") s3_helper = S3Helper() @@ -212,7 +212,7 @@ def main(): report_url, NAME, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) # Refuse other checks to run if fast test failed if state != "success": diff --git a/tests/ci/functional_test_check.py b/tests/ci/functional_test_check.py index 0736367a62f4..5046c13f81bd 100644 --- a/tests/ci/functional_test_check.py +++ b/tests/ci/functional_test_check.py @@ -68,9 +68,9 @@ def get_additional_envs( def get_image_name(check_name: str) -> str: if "stateless" in check_name.lower(): - return "clickhouse/stateless-test" + return "altinityinfra/stateless-test" if "stateful" in check_name.lower(): - return "clickhouse/stateful-test" + return "altinityinfra/stateful-test" else: raise Exception(f"Cannot deduce image name based on check name 
{check_name}") @@ -119,7 +119,8 @@ def get_run_command( return ( f"docker run --volume={builds_path}:/package_folder " - f"{ci_logs_args}" + f"{ci_logs_args} " + f"--dns=8.8.8.8 " f"--volume={repo_path}/tests:/usr/share/clickhouse-test " f"{volume_with_broken_test}" f"--volume={result_path}:/test_output " @@ -271,10 +272,12 @@ def main(): run_by_hash_total = 0 check_name_with_group = check_name - rerun_helper = RerunHelper(commit, check_name_with_group) - if rerun_helper.is_already_finished_by_status(): - logging.info("Check is already finished according to github status, exiting") - sys.exit(0) + # Always re-run, even if it finished in previous run. + # gh = Github(get_best_robot_token()) + # rerun_helper = RerunHelper(gh, pr_info, check_name_with_group) + # if rerun_helper.is_already_finished_by_status(): + # logging.info("Check is already finished according to github status, exiting") + # sys.exit(0) tests_to_run = [] if run_changed_tests: @@ -401,7 +404,7 @@ def main(): report_url, check_name_with_group, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if state != "success": if FORCE_TESTS_LABEL in pr_info.labels: diff --git a/tests/ci/get_robot_token.py b/tests/ci/get_robot_token.py index 530f894a36a0..b93eea52f700 100644 --- a/tests/ci/get_robot_token.py +++ b/tests/ci/get_robot_token.py @@ -53,8 +53,20 @@ def get_parameters_from_ssm( ROBOT_TOKEN = None # type: Optional[Token] +# NOTE(Arthur Passos): Original CI code uses the "_original" version of this method. Each robot token is rate limited +# and the original implementation selects the "best one". To make it simpler and iterate faster, +# we are using only one robot and keeping the method signature. 
In the future we might reconsider +# having multiple robot tokens +def get_best_robot_token(token_prefix_env_name="github_robot_token"): + # Re-use already fetched token (same as in get_best_robot_token_original) + # except here we assume it is always a string (since we use only one token and don't do token rotation) + global ROBOT_TOKEN + if ROBOT_TOKEN is not None: + return ROBOT_TOKEN + ROBOT_TOKEN = get_parameter_from_ssm(token_prefix_env_name) + return ROBOT_TOKEN -def get_best_robot_token(tokens_path: str = "/github-tokens") -> str: +def get_best_robot_token_original(token_prefix_env_name: str="github_robot_token_") -> str: global ROBOT_TOKEN if ROBOT_TOKEN is not None: return ROBOT_TOKEN.value diff --git a/tests/ci/git_helper.py b/tests/ci/git_helper.py index 9927d5a42489..35e825143a65 100644 --- a/tests/ci/git_helper.py +++ b/tests/ci/git_helper.py @@ -10,9 +10,11 @@ # ^ and $ match subline in `multiple\nlines` # \A and \Z match only start and end of the whole string +# NOTE (vnemkov): support both upstream tag style: v22.x.y.z-lts and Altinity tag style: v22.x.y.z.altinitystable +# Because at early release stages there could be no Altinity tag set on commit, only upstream one. 
RELEASE_BRANCH_REGEXP = r"\A\d+[.]\d+\Z" TAG_REGEXP = ( - r"\Av\d{2}[.][1-9]\d*[.][1-9]\d*[.][1-9]\d*-(testing|prestable|stable|lts)\Z" + r"\Av\d{2}[.][1-9]\d*[.][1-9]\d*[.][1-9]\d*[-\.](testing|prestable|stable|lts|altinitystable)\Z" ) SHA_REGEXP = re.compile(r"\A([0-9]|[a-f]){40}\Z") diff --git a/tests/ci/git_test.py b/tests/ci/git_test.py index 3aedd8a8dea1..0c28c8d38421 100644 --- a/tests/ci/git_test.py +++ b/tests/ci/git_test.py @@ -70,6 +70,9 @@ def test_tags(self): with self.assertRaises(Exception): setattr(self.git, tag_attr, tag) + def check_tag(self): + self.git.check_tag("v21.12.333.4567-altinitystable") + def test_tweak(self): self.git.commits_since_tag = 0 self.assertEqual(self.git.tweak, 1) @@ -79,3 +82,6 @@ def test_tweak(self): self.assertEqual(self.git.tweak, 22224) self.git.commits_since_tag = 0 self.assertEqual(self.git.tweak, 22222) + +if __name__ == '__main__': + unittest.main() diff --git a/tests/ci/install_check.py b/tests/ci/install_check.py index 9971d0c236c6..44bb4c2ffd09 100644 --- a/tests/ci/install_check.py +++ b/tests/ci/install_check.py @@ -36,8 +36,8 @@ from upload_result_helper import upload_results -RPM_IMAGE = "clickhouse/install-rpm-test" -DEB_IMAGE = "clickhouse/install-deb-test" +RPM_IMAGE = "altinityinfra/install-rpm-test" +DEB_IMAGE = "altinityinfra/install-deb-test" TEMP_PATH = Path(TEMP) LOGS_PATH = TEMP_PATH / "tests_logs" diff --git a/tests/ci/integration_test_check.py b/tests/ci/integration_test_check.py index 9ac339ac17db..04259d46afde 100644 --- a/tests/ci/integration_test_check.py +++ b/tests/ci/integration_test_check.py @@ -39,18 +39,18 @@ # When update, update # tests/integration/ci-runner.py:ClickhouseIntegrationTestsRunner.get_images_names too IMAGES = [ - "clickhouse/dotnet-client", - "clickhouse/integration-helper", - "clickhouse/integration-test", - "clickhouse/integration-tests-runner", - "clickhouse/kerberized-hadoop", - "clickhouse/kerberos-kdc", - "clickhouse/mysql-golang-client", - 
"clickhouse/mysql-java-client", - "clickhouse/mysql-js-client", - "clickhouse/mysql-php-client", - "clickhouse/nginx-dav", - "clickhouse/postgresql-java-client", + "altinityinfra/dotnet-client", + "altinityinfra/integration-helper", + "altinityinfra/integration-test", + "altinityinfra/integration-tests-runner", + "altinityinfra/kerberized-hadoop", + "altinityinfra/kerberos-kdc", + "altinityinfra/mysql-golang-client", + "altinityinfra/mysql-java-client", + "altinityinfra/mysql-js-client", + "altinityinfra/mysql-php-client", + "altinityinfra/nginx-dav", + "altinityinfra/postgresql-java-client", ] @@ -210,10 +210,12 @@ def main(): gh = Github(get_best_robot_token(), per_page=100) commit = get_commit(gh, pr_info.sha) - rerun_helper = RerunHelper(commit, check_name_with_group) - if rerun_helper.is_already_finished_by_status(): - logging.info("Check is already finished according to github status, exiting") - sys.exit(0) + # Always re-run, even if it finished in previous run. + # gh = Github(get_best_robot_token()) + # rerun_helper = RerunHelper(gh, pr_info, check_name_with_group) + # if rerun_helper.is_already_finished_by_status(): + # logging.info("Check is already finished according to github status, exiting") + # sys.exit(0) images = get_images_with_versions(reports_path, IMAGES) result_path = temp_path / "output_dir" @@ -311,7 +313,7 @@ def main(): check_name_with_group, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if state == "failure": sys.exit(1) diff --git a/tests/ci/jepsen_check.py b/tests/ci/jepsen_check.py index 94ec8f937900..ae48cf960ded 100644 --- a/tests/ci/jepsen_check.py +++ b/tests/ci/jepsen_check.py @@ -34,10 +34,10 @@ KEEPER_DESIRED_INSTANCE_COUNT = 3 SERVER_DESIRED_INSTANCE_COUNT = 4 -KEEPER_IMAGE_NAME = "clickhouse/keeper-jepsen-test" +KEEPER_IMAGE_NAME = "altinityinfra/keeper-jepsen-test" KEEPER_CHECK_NAME = "ClickHouse 
Keeper Jepsen" -SERVER_IMAGE_NAME = "clickhouse/server-jepsen-test" +SERVER_IMAGE_NAME = "altinityinfra/server-jepsen-test" SERVER_CHECK_NAME = "ClickHouse Server Jepsen" @@ -304,7 +304,7 @@ def main(): report_url, check_name, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) clear_autoscaling_group() diff --git a/tests/ci/performance_comparison_check.py b/tests/ci/performance_comparison_check.py index 2e4989c66cfc..fd523fa0318f 100644 --- a/tests/ci/performance_comparison_check.py +++ b/tests/ci/performance_comparison_check.py @@ -31,7 +31,7 @@ from clickhouse_helper import get_instance_type from stopwatch import Stopwatch -IMAGE_NAME = "clickhouse/performance-comparison" +IMAGE_NAME = "altinityinfra/performance-comparison" def get_run_command( diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py new file mode 100644 index 000000000000..701ccc29b65b --- /dev/null +++ b/tests/ci/sign_release.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python3 +import sys +import os +import logging +from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH +from s3_helper import S3Helper +from pr_info import PRInfo +from build_download_helper import download_builds_filter +import hashlib + +GPG_BINARY_SIGNING_KEY = os.getenv("GPG_BINARY_SIGNING_KEY") +GPG_BINARY_SIGNING_PASSPHRASE = os.getenv("GPG_BINARY_SIGNING_PASSPHRASE") + +CHECK_NAME = "Sign release (actions)" + +def hash_file(file_path): + BLOCK_SIZE = 65536 # The size of each read from the file + + file_hash = hashlib.sha256() # Create the hash object, can use something other than `.sha256()` if you wish + with open(file_path, 'rb') as f: # Open the file to read it's bytes + fb = f.read(BLOCK_SIZE) # Read from the file. 
Take in the amount declared above + while len(fb) > 0: # While there is still data being read from the file + file_hash.update(fb) # Update the hash + fb = f.read(BLOCK_SIZE) # Read the next block from the file + + hash_file_path = file_path + '.sha256' + with open(hash_file_path, 'x') as f: + digest = file_hash.hexdigest() + f.write(digest) + print(f'Hashed {file_path}: {digest}') + + return hash_file_path + +def sign_file(file_path): + priv_key_file_path = 'priv.key' + with open(priv_key_file_path, 'x') as f: + f.write(GPG_BINARY_SIGNING_KEY) + + out_file_path = f'{file_path}.gpg' + + os.system(f'echo {GPG_BINARY_SIGNING_PASSPHRASE} | gpg --batch --import {priv_key_file_path}') + os.system(f'gpg -o {out_file_path} --pinentry-mode=loopback --batch --yes --passphrase {GPG_BINARY_SIGNING_PASSPHRASE} --sign {file_path}') + print(f"Signed {file_path}") + os.remove(priv_key_file_path) + + return out_file_path + +def main(): + reports_path = REPORTS_PATH + + if not os.path.exists(TEMP_PATH): + os.makedirs(TEMP_PATH) + + pr_info = PRInfo() + + logging.info("Repo copy path %s", REPO_COPY) + + s3_helper = S3Helper() + + s3_path_prefix = f"{pr_info.number}/{pr_info.sha}/" + CHECK_NAME.lower().replace( + " ", "_" + ).replace("(", "_").replace(")", "_").replace(",", "_") + + # downloads `package_release` artifacts generated + download_builds_filter(CHECK_NAME, reports_path, TEMP_PATH) + + for f in os.listdir(TEMP_PATH): + full_path = os.path.join(TEMP_PATH, f) + hashed_file_path = hash_file(full_path) + signed_file_path = sign_file(hashed_file_path) + s3_path = f'{s3_path_prefix}/{os.path.basename(signed_file_path)}' + s3_helper.upload_build_file_to_s3(signed_file_path, s3_path) + print(f'Uploaded file {signed_file_path} to {s3_path}') + + # Signed hashes are: + # clickhouse-client_22.3.15.2.altinitystable_amd64.deb.sha512.gpg clickhouse-keeper_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg + # clickhouse-client-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg 
clickhouse-keeper-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg + # clickhouse-client_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg clickhouse-keeper-dbg_22.3.15.2.altinitystable_amd64.deb.sha512.gpg + # clickhouse-client-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg clickhouse-keeper-dbg-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg + # clickhouse-common-static_22.3.15.2.altinitystable_amd64.deb.sha512.gpg clickhouse-keeper-dbg_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg + # clickhouse-common-static-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg clickhouse-keeper-dbg-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg + # clickhouse-common-static_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg clickhouse-keeper.sha512.gpg + # clickhouse-common-static-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg clickhouse-library-bridge.sha512.gpg + # clickhouse-common-static-dbg_22.3.15.2.altinitystable_amd64.deb.sha512.gpg clickhouse-odbc-bridge.sha512.gpg + # clickhouse-common-static-dbg-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg clickhouse-server_22.3.15.2.altinitystable_amd64.deb.sha512.gpg + # clickhouse-common-static-dbg_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg clickhouse-server-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg + # clickhouse-common-static-dbg-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg clickhouse-server_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg + # clickhouse-keeper_22.3.15.2.altinitystable_amd64.deb.sha512.gpg clickhouse-server-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg + # clickhouse-keeper-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg clickhouse.sha512.gpg + + sys.exit(0) + +if __name__ == "__main__": + main() diff --git a/tests/ci/sqlancer_check.py b/tests/ci/sqlancer_check.py index 47bc3b2c1e8c..f14949e3b8a4 100644 --- a/tests/ci/sqlancer_check.py +++ b/tests/ci/sqlancer_check.py @@ -29,7 +29,7 @@ from tee_popen import TeePopen from upload_result_helper import upload_results -IMAGE_NAME = "clickhouse/sqlancer-test" +IMAGE_NAME = 
"altinityinfra/sqlancer-test" def get_run_command(download_url: str, workspace_path: Path, image: DockerImage) -> str: diff --git a/tests/ci/sqllogic_test.py b/tests/ci/sqllogic_test.py index 7650a4afa40a..55c2134d5b48 100755 --- a/tests/ci/sqllogic_test.py +++ b/tests/ci/sqllogic_test.py @@ -30,7 +30,7 @@ NO_CHANGES_MSG = "Nothing to run" -IMAGE_NAME = "clickhouse/sqllogic-test" +IMAGE_NAME = "altinityinfra/sqllogic-test" def get_run_command( diff --git a/tests/ci/sqltest.py b/tests/ci/sqltest.py index a4eb1b23349d..0fa19447946f 100644 --- a/tests/ci/sqltest.py +++ b/tests/ci/sqltest.py @@ -28,7 +28,7 @@ from s3_helper import S3Helper from stopwatch import Stopwatch -IMAGE_NAME = "clickhouse/sqltest" +IMAGE_NAME = "altinityinfra/sqltest" def get_run_command(pr_number, sha, download_url, workspace_path, image): diff --git a/tests/ci/stress_check.py b/tests/ci/stress_check.py index afc5c3d74980..012bd1bcf88c 100644 --- a/tests/ci/stress_check.py +++ b/tests/ci/stress_check.py @@ -190,11 +190,11 @@ def run_stress_test(docker_image_name: str) -> None: report_url, check_name, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if state == "failure": sys.exit(1) if __name__ == "__main__": - run_stress_test("clickhouse/stress-test") + run_stress_test("altinityinfra/stress-test") diff --git a/tests/ci/style_check.py b/tests/ci/style_check.py index dd3cf1a51ee6..d2f5a52acd4c 100644 --- a/tests/ci/style_check.py +++ b/tests/ci/style_check.py @@ -161,7 +161,7 @@ def main(): code = int(state != "success") sys.exit(code) - docker_image = get_image_with_version(temp_path, "clickhouse/style-test") + docker_image = get_image_with_version(temp_path, "altinityinfra/style-test") s3_helper = S3Helper() cmd = ( @@ -197,7 +197,7 @@ def main(): report_url, NAME, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + 
ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if state in ["error", "failure"]: sys.exit(1) diff --git a/tests/ci/tests/docker_images_for_tests.json b/tests/ci/tests/docker_images_for_tests.json index 70db87605616..008c60ba6206 100644 --- a/tests/ci/tests/docker_images_for_tests.json +++ b/tests/ci/tests/docker_images_for_tests.json @@ -1,162 +1,124 @@ { "docker/packager/deb": { - "name": "clickhouse/deb-builder", + "name": "altinityinfra/deb-builder", "dependent": [] }, "docker/packager/binary": { - "name": "clickhouse/binary-builder", - "dependent": [ - "docker/test/codebrowser" - ] + "name": "altinityinfra/binary-builder", + "dependent": [] }, "docker/test/compatibility/centos": { - "name": "clickhouse/test-old-centos", + "name": "altinityinfra/test-old-centos", "dependent": [] }, "docker/test/compatibility/ubuntu": { - "name": "clickhouse/test-old-ubuntu", + "name": "altinityinfra/test-old-ubuntu", "dependent": [] }, "docker/test/integration/base": { - "name": "clickhouse/integration-test", - "dependent": [] - }, - "docker/test/fuzzer": { - "name": "clickhouse/fuzzer", - "dependent": [] - }, - "docker/test/performance-comparison": { - "name": "clickhouse/performance-comparison", + "name": "altinityinfra/integration-test", "dependent": [] }, "docker/test/util": { - "name": "clickhouse/test-util", + "name": "altinityinfra/test-util", "dependent": [ "docker/test/base", "docker/test/fasttest" ] }, "docker/test/stateless": { - "name": "clickhouse/stateless-test", + "name": "altinityinfra/stateless-test", "dependent": [ "docker/test/stateful", "docker/test/unit" ] }, "docker/test/stateful": { - "name": "clickhouse/stateful-test", + "name": "altinityinfra/stateful-test", "dependent": [ "docker/test/stress" ] }, "docker/test/unit": { - "name": "clickhouse/unit-test", - "dependent": [] - }, - "docker/test/stress": { - "name": "clickhouse/stress-test", - "dependent": [] - }, - "docker/test/codebrowser": { - "name": 
"clickhouse/codebrowser", + "name": "altinityinfra/unit-test", "dependent": [] }, "docker/test/integration/runner": { - "name": "clickhouse/integration-tests-runner", + "name": "altinityinfra/integration-tests-runner", "dependent": [] }, "docker/test/fasttest": { - "name": "clickhouse/fasttest", - "dependent": [] - }, - "docker/test/style": { - "name": "clickhouse/style-test", + "name": "altinityinfra/fasttest", "dependent": [] }, "docker/test/integration/s3_proxy": { - "name": "clickhouse/s3-proxy", + "name": "altinityinfra/s3-proxy", "dependent": [] }, "docker/test/integration/resolver": { - "name": "clickhouse/python-bottle", + "name": "altinityinfra/python-bottle", "dependent": [] }, "docker/test/integration/helper_container": { - "name": "clickhouse/integration-helper", + "name": "altinityinfra/integration-helper", "dependent": [] }, "docker/test/integration/mysql_golang_client": { - "name": "clickhouse/mysql-golang-client", + "name": "altinityinfra/mysql-golang-client", "dependent": [] }, "docker/test/integration/dotnet_client": { - "name": "clickhouse/dotnet-client", + "name": "altinityinfra/dotnet-client", "dependent": [] }, "docker/test/integration/mysql_java_client": { - "name": "clickhouse/mysql-java-client", + "name": "altinityinfra/mysql-java-client", "dependent": [] }, "docker/test/integration/mysql_js_client": { - "name": "clickhouse/mysql-js-client", + "name": "altinityinfra/mysql-js-client", "dependent": [] }, "docker/test/integration/mysql_php_client": { - "name": "clickhouse/mysql-php-client", + "name": "altinityinfra/mysql-php-client", "dependent": [] }, "docker/test/integration/postgresql_java_client": { - "name": "clickhouse/postgresql-java-client", + "name": "altinityinfra/postgresql-java-client", "dependent": [] }, "docker/test/integration/kerberos_kdc": { - "name": "clickhouse/kerberos-kdc", + "name": "altinityinfra/kerberos-kdc", "dependent": [] }, "docker/test/base": { - "name": "clickhouse/test-base", - "dependent": [ + "name": 
"altinityinfra/test-base", + "dependent": [ "docker/test/stateless", "docker/test/integration/base", "docker/test/fuzzer", "docker/test/keeper-jepsen", "docker/test/sqltest" - ] + ] }, "docker/test/integration/kerberized_hadoop": { - "name": "clickhouse/kerberized-hadoop", + "name": "altinityinfra/kerberized-hadoop", "dependent": [] }, "docker/test/sqlancer": { - "name": "clickhouse/sqlancer-test", + "name": "altinityinfra/sqlancer-test", "dependent": [] }, "docker/test/keeper-jepsen": { - "name": "clickhouse/keeper-jepsen-test", - "dependent": [] - }, - "docker/docs/builder": { - "name": "clickhouse/docs-builder", - "only_amd64": true, - "dependent": [ - "docker/docs/check", - "docker/docs/release" - ] - }, - "docker/docs/check": { - "name": "clickhouse/docs-check", - "dependent": [] - }, - "docker/docs/release": { - "name": "clickhouse/docs-release", + "name": "altinityinfra/keeper-jepsen-test", "dependent": [] }, "docker/test/sqllogic": { - "name": "clickhouse/sqllogic-test", + "name": "altinityinfra/sqllogic-test", "dependent": [] }, "docker/test/sqltest": { - "name": "clickhouse/sqltest", + "name": "altinityinfra/sqltest", "dependent": [] } } diff --git a/tests/ci/unit_tests_check.py b/tests/ci/unit_tests_check.py index 6384b0ff432e..6f6aa3c3a9b2 100644 --- a/tests/ci/unit_tests_check.py +++ b/tests/ci/unit_tests_check.py @@ -32,7 +32,7 @@ from upload_result_helper import upload_results -IMAGE_NAME = "clickhouse/unit-test" +IMAGE_NAME = "altinityinfra/unit-test" def get_test_name(line): @@ -183,7 +183,7 @@ def main(): check_name, ) - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if state == "failure": sys.exit(1) diff --git a/tests/ci/upgrade_check.py b/tests/ci/upgrade_check.py index 83b6f9e299fd..f84451cad81d 100644 --- a/tests/ci/upgrade_check.py +++ b/tests/ci/upgrade_check.py @@ -1,4 +1,4 @@ import stress_check if __name__ == 
"__main__": - stress_check.run_stress_test("clickhouse/upgrade-check") + stress_check.run_stress_test("altinityinfra/upgrade-check") diff --git a/tests/ci/version_helper.py b/tests/ci/version_helper.py index fb046e989a91..4b75974500c8 100755 --- a/tests/ci/version_helper.py +++ b/tests/ci/version_helper.py @@ -46,6 +46,7 @@ def __init__( revision: Union[int, str], git: Optional[Git], tweak: Optional[str] = None, + flavour: Optional[str] = None, ): self._major = int(major) self._minor = int(minor) @@ -59,6 +60,7 @@ def __init__( self._tweak = self._git.tweak self._describe = "" self._description = "" + self._flavour = flavour def update(self, part: Literal["major", "minor", "patch"]) -> "ClickHouseVersion": """If part is valid, returns a new version""" @@ -132,9 +134,12 @@ def description(self) -> str: @property def string(self): - return ".".join( + version_as_string = ".".join( (str(self.major), str(self.minor), str(self.patch), str(self.tweak)) ) + if self._flavour: + version_as_string = f"{version_as_string}.{self._flavour}" + return version_as_string def as_dict(self) -> VERSIONS: return { @@ -155,7 +160,10 @@ def with_description(self, version_type): if version_type not in VersionType.VALID: raise ValueError(f"version type {version_type} not in {VersionType.VALID}") self._description = version_type - self._describe = f"v{self.string}-{version_type}" + if version_type == self._flavour: + self._describe = f"v{self.string}" + else: + self._describe = f"v{self.string}-{version_type}" def __eq__(self, other: Any) -> bool: if not isinstance(self, type(other)): @@ -183,16 +191,17 @@ def __le__(self, other: "ClickHouseVersion") -> bool: class VersionType: LTS = "lts" PRESTABLE = "prestable" - STABLE = "stable" + STABLE = "altinitystable" TESTING = "testing" VALID = (TESTING, PRESTABLE, STABLE, LTS) def validate_version(version: str) -> None: + # NOTE(vnemkov): minor but imporant fixes, so versions with 'flavour' are threated as valid (e.g. 
22.8.8.4.altinitystable) parts = version.split(".") - if len(parts) != 4: + if len(parts) < 4: raise ValueError(f"{version} does not contain 4 parts") - for part in parts: + for part in parts[:4]: int(part) @@ -232,6 +241,9 @@ def get_version_from_repo( versions["patch"], versions["revision"], git, + # Explicitly use tweak value from version file + tweak=versions.get("tweak", versions["revision"]), + flavour=versions.get("flavour", None) ) @@ -239,8 +251,17 @@ def get_version_from_string( version: str, git: Optional[Git] = None ) -> ClickHouseVersion: validate_version(version) - parts = version.split(".") - return ClickHouseVersion(parts[0], parts[1], parts[2], -1, git, parts[3]) + # dict for simple handling of missing parts with parts.get(index, default) + parts = dict(enumerate(version.split("."))) + return ClickHouseVersion( + parts[0], + parts[1], + parts[2], + -1, + git, + parts.get(3, None), + parts.get(4, None) + ) def get_version_from_tag(tag: str) -> ClickHouseVersion: @@ -314,7 +335,7 @@ def update_contributors( cfd.write(content) -def update_version_local(version, version_type="testing"): +def update_version_local(version : ClickHouseVersion, version_type="testing"): update_contributors() version.with_description(version_type) update_cmake_version(version) diff --git a/tests/integration/ci-runner.py b/tests/integration/ci-runner.py index aa89ccf11b36..f9aeb520c630 100755 --- a/tests/integration/ci-runner.py +++ b/tests/integration/ci-runner.py @@ -303,18 +303,18 @@ def shuffle_test_groups(self): @staticmethod def get_images_names(): return [ - "clickhouse/dotnet-client", - "clickhouse/integration-helper", - "clickhouse/integration-test", - "clickhouse/integration-tests-runner", - "clickhouse/kerberized-hadoop", - "clickhouse/kerberos-kdc", - "clickhouse/mysql-golang-client", - "clickhouse/mysql-java-client", - "clickhouse/mysql-js-client", - "clickhouse/mysql-php-client", - "clickhouse/nginx-dav", - "clickhouse/postgresql-java-client", + 
"altinityinfra/dotnet-client", + "altinityinfra/integration-helper", + "altinityinfra/integration-test", + "altinityinfra/integration-tests-runner", + "altinityinfra/kerberized-hadoop", + "altinityinfra/kerberos-kdc", + "altinityinfra/mysql-golang-client", + "altinityinfra/mysql-java-client", + "altinityinfra/mysql-js-client", + "altinityinfra/mysql-php-client", + "altinityinfra/nginx-dav", + "altinityinfra/postgresql-java-client", ] def _pre_pull_images(self, repo_path): @@ -322,7 +322,7 @@ def _pre_pull_images(self, repo_path): cmd = ( "cd {repo_path}/tests/integration && " - "timeout --signal=KILL 1h ./runner {runner_opts} {image_cmd} --pre-pull --command '{command}' ".format( + "timeout --signal=KILL 2h ./runner {runner_opts} {image_cmd} --pre-pull --command '{command}' ".format( repo_path=repo_path, runner_opts=self._get_runner_opts(), image_cmd=image_cmd, @@ -546,7 +546,7 @@ def _get_runner_image_cmd(self, repo_path): "--docker-image-version", ): for img in self.get_images_names(): - if img == "clickhouse/integration-tests-runner": + if img == "altinityinfra/integration-tests-runner": runner_version = self.get_image_version(img) logging.info( "Can run with custom docker image version %s", runner_version diff --git a/tests/integration/helpers/cluster.py b/tests/integration/helpers/cluster.py index 7dbb1d6db36b..04b8bfb75b85 100644 --- a/tests/integration/helpers/cluster.py +++ b/tests/integration/helpers/cluster.py @@ -40,6 +40,7 @@ except Exception as e: logging.warning(f"Cannot import some modules, some tests may not work: {e}") + from dict2xml import dict2xml from kazoo.client import KazooClient from kazoo.exceptions import KazooException @@ -945,7 +946,7 @@ def setup_keeper_cmd(self, instance, env_variables, docker_compose_yml_dir): env_variables["keeper_binary"] = binary_path env_variables["keeper_cmd_prefix"] = keeper_cmd_prefix - env_variables["image"] = "clickhouse/integration-test:" + self.docker_base_tag + env_variables["image"] = 
"altinityinfra/integration-test:" + self.docker_base_tag env_variables["user"] = str(os.getuid()) env_variables["keeper_fs"] = "bind" for i in range(1, 4): @@ -1562,7 +1563,7 @@ def add_instance( allow_analyzer=True, hostname=None, env_variables=None, - image="clickhouse/integration-test", + image="altinityinfra/integration-test", tag=None, stay_alive=False, ipv4_address=None, @@ -3250,7 +3251,7 @@ def __init__( copy_common_configs=True, hostname=None, env_variables=None, - image="clickhouse/integration-test", + image="altinityinfra/integration-test", tag="latest", stay_alive=False, ipv4_address=None, diff --git a/tests/integration/helpers/network.py b/tests/integration/helpers/network.py index e6e79dc79478..3689bb409d15 100644 --- a/tests/integration/helpers/network.py +++ b/tests/integration/helpers/network.py @@ -243,7 +243,7 @@ def __init__( def _ensure_container(self): if self._container is None or self._container_expire_time <= time.time(): - image_name = "clickhouse/integration-helper:" + os.getenv( + image_name = "altinityinfra/integration-helper:" + os.getenv( "DOCKER_HELPER_TAG", "latest" ) for i in range(5): diff --git a/tests/integration/runner b/tests/integration/runner index 4c2b10545389..ef00881d3358 100755 --- a/tests/integration/runner +++ b/tests/integration/runner @@ -29,7 +29,7 @@ CONFIG_DIR_IN_REPO = "programs/server" INTEGRATION_DIR_IN_REPO = "tests/integration" SRC_DIR_IN_REPO = "src" -DIND_INTEGRATION_TESTS_IMAGE_NAME = "clickhouse/integration-tests-runner" +DIND_INTEGRATION_TESTS_IMAGE_NAME = "altinityinfra/integration-tests-runner" def check_args_and_update_paths(args): @@ -333,25 +333,25 @@ if __name__ == "__main__": [image, tag] = img_tag.split(":") if image == "clickhouse/dotnet-client": env_tags += "-e {}={} ".format("DOCKER_DOTNET_CLIENT_TAG", tag) - elif image == "clickhouse/integration-helper": + elif image == "altinityinfra/integration-helper": env_tags += "-e {}={} ".format("DOCKER_HELPER_TAG", tag) - elif image == 
"clickhouse/integration-test": + elif image == "altinityinfra/integration-test": env_tags += "-e {}={} ".format("DOCKER_BASE_TAG", tag) - elif image == "clickhouse/kerberized-hadoop": + elif image == "altinityinfra/kerberized-hadoop": env_tags += "-e {}={} ".format("DOCKER_KERBERIZED_HADOOP_TAG", tag) - elif image == "clickhouse/kerberos-kdc": + elif image == "altinityinfra/kerberos-kdc": env_tags += "-e {}={} ".format("DOCKER_KERBEROS_KDC_TAG", tag) - elif image == "clickhouse/mysql-golang-client": + elif image == "altinityinfra/mysql-golang-client": env_tags += "-e {}={} ".format("DOCKER_MYSQL_GOLANG_CLIENT_TAG", tag) - elif image == "clickhouse/mysql-java-client": + elif image == "altinityinfra/mysql-java-client": env_tags += "-e {}={} ".format("DOCKER_MYSQL_JAVA_CLIENT_TAG", tag) - elif image == "clickhouse/mysql-js-client": + elif image == "altinityinfra/mysql-js-client": env_tags += "-e {}={} ".format("DOCKER_MYSQL_JS_CLIENT_TAG", tag) - elif image == "clickhouse/mysql-php-client": + elif image == "altinityinfra/mysql-php-client": env_tags += "-e {}={} ".format("DOCKER_MYSQL_PHP_CLIENT_TAG", tag) - elif image == "clickhouse/nginx-dav": + elif image == "altinityinfra/nginx-dav": env_tags += "-e {}={} ".format("DOCKER_NGINX_DAV_TAG", tag) - elif image == "clickhouse/postgresql-java-client": + elif image == "altinityinfra/postgresql-java-client": env_tags += "-e {}={} ".format("DOCKER_POSTGRESQL_JAVA_CLIENT_TAG", tag) else: logging.info("Unknown image %s" % (image)) diff --git a/tests/integration/test_backward_compatibility/test_functions.py b/tests/integration/test_backward_compatibility/test_functions.py index c86c3ba0ab29..eb82e720f12c 100644 --- a/tests/integration/test_backward_compatibility/test_functions.py +++ b/tests/integration/test_backward_compatibility/test_functions.py @@ -12,6 +12,7 @@ upstream = cluster.add_instance("upstream", allow_analyzer=False) backward = cluster.add_instance( "backward", + # NOTE(vnemkov): don't change that to 
altinitystable/clickhouse-server image="clickhouse/clickhouse-server", # Note that a bug changed the string representation of several aggregations in 22.9 and 22.10 and some minor # releases of 22.8, 22.7 and 22.3 diff --git a/tests/integration/test_backward_compatibility/test_insert_profile_events.py b/tests/integration/test_backward_compatibility/test_insert_profile_events.py index 8564c6b59526..bc9676541fb3 100644 --- a/tests/integration/test_backward_compatibility/test_insert_profile_events.py +++ b/tests/integration/test_backward_compatibility/test_insert_profile_events.py @@ -10,6 +10,7 @@ upstream_node = cluster.add_instance("upstream_node", allow_analyzer=False) old_node = cluster.add_instance( "old_node", + # NOTE(vnemkov): do not change to altinityinfra/clickhouse-server image="clickhouse/clickhouse-server", tag="22.5.1.2079", with_installed_binary=True, diff --git a/tests/integration/test_backward_compatibility/test_ip_types_binary_compatibility.py b/tests/integration/test_backward_compatibility/test_ip_types_binary_compatibility.py index 04016755a245..9c62b78a328f 100644 --- a/tests/integration/test_backward_compatibility/test_ip_types_binary_compatibility.py +++ b/tests/integration/test_backward_compatibility/test_ip_types_binary_compatibility.py @@ -6,6 +6,7 @@ # Version 21.6.3.14 has incompatible partition id for tables with UUID in partition key. 
node_22_6 = cluster.add_instance( "node_22_6", + # NOTE(vnemkov): do not change to altinityinfra/clickhouse-server image="clickhouse/clickhouse-server", tag="22.6", stay_alive=True, diff --git a/tests/integration/test_backward_compatibility/test_vertical_merges_from_compact_parts.py b/tests/integration/test_backward_compatibility/test_vertical_merges_from_compact_parts.py index 9c9d1a4d3121..6e25c71c92e1 100644 --- a/tests/integration/test_backward_compatibility/test_vertical_merges_from_compact_parts.py +++ b/tests/integration/test_backward_compatibility/test_vertical_merges_from_compact_parts.py @@ -6,6 +6,7 @@ node_old = cluster.add_instance( "node1", + # NOTE(vnemkov): do not change to altinityinfra/clickhouse-server image="clickhouse/clickhouse-server", tag="22.8", stay_alive=True, diff --git a/tests/integration/test_distributed_inter_server_secret/test.py b/tests/integration/test_distributed_inter_server_secret/test.py index 1aeaddcf3c5a..857980722e1c 100644 --- a/tests/integration/test_distributed_inter_server_secret/test.py +++ b/tests/integration/test_distributed_inter_server_secret/test.py @@ -29,6 +29,7 @@ def make_instance(name, cfg, *args, **kwargs): backward = make_instance( "backward", "configs/remote_servers_backward.xml", + # NOTE(vnemkov): do not change to altinityinfra/clickhouse-server image="clickhouse/clickhouse-server", # version without DBMS_MIN_REVISION_WITH_INTERSERVER_SECRET_V2 tag="23.2.3", diff --git a/tests/integration/test_storage_kafka/test.py b/tests/integration/test_storage_kafka/test.py index b1191af60b7c..bc9a540dc067 100644 --- a/tests/integration/test_storage_kafka/test.py +++ b/tests/integration/test_storage_kafka/test.py @@ -30,12 +30,24 @@ from kafka.protocol.group import MemberAssignment from kafka.admin import NewTopic +from pathlib import Path +from helpers.cluster import run_and_check # protoc --version # libprotoc 3.0.0 # # to create kafka_pb2.py # protoc --python_out=. 
kafka.proto +# Regenerate _pb2 files on each run, to make sure test doesn't depend installed protobuf version +proto_dir = Path(__file__).parent / "clickhouse_path/format_schemas" +gen_dir = Path(__file__).parent +gen_dir.mkdir(exist_ok=True) +run_and_check( + f"python3 -m grpc_tools.protoc -I{proto_dir!s} --python_out={gen_dir!s} --grpc_python_out={gen_dir!s} \ + {proto_dir!s}/kafka.proto", + shell=True, +) + from . import kafka_pb2 from . import social_pb2 from . import message_with_repeated_pb2 diff --git a/utils/clickhouse-docker b/utils/clickhouse-docker index cfe515f1de54..34b637f0eaad 100755 --- a/utils/clickhouse-docker +++ b/utils/clickhouse-docker @@ -26,11 +26,11 @@ then # https://stackoverflow.com/a/39454426/1555175 wget -nv https://registry.hub.docker.com/v1/repositories/clickhouse/clickhouse-server/tags -O - | sed -e 's/[][]//g' -e 's/"//g' -e 's/ //g' | tr '}' '\n' | awk -F: '{print $3}' else - docker pull clickhouse/clickhouse-server:${param} + docker pull altinityinfra/clickhouse-server:${param} tmp_dir=$(mktemp -d -t ci-XXXXXXXXXX) # older version require /nonexistent folder to exist to run clickhouse client :D chmod 777 ${tmp_dir} set -e - containerid=`docker run -v${tmp_dir}:/nonexistent -d clickhouse/clickhouse-server:${param}` + containerid=`docker run -v${tmp_dir}:/nonexistent -d altinityinfra/clickhouse-server:${param}` set +e while : do diff --git a/utils/zero_copy/zero_copy_schema_converter.py b/utils/zero_copy/zero_copy_schema_converter.py index 6103ac69c6e3..f80f36cecf94 100755 --- a/utils/zero_copy/zero_copy_schema_converter.py +++ b/utils/zero_copy/zero_copy_schema_converter.py @@ -33,7 +33,7 @@ def parse_args(): parser.add_argument( "-z", "--zcroot", - default="clickhouse/zero_copy", + default="altinityinfra/zero_copy", help="ZooKeeper node for new zero-copy data", ) parser.add_argument( From 9f5c0235a3202c44e83fe4648394f15401bf69b4 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Fri, 27 Oct 2023 13:27:27 +0200 Subject: [PATCH 
002/111] Fixed some CI/CD issues --- .../runner/compose/docker_compose_jdbc_bridge.yml | 2 +- tests/ci/build_check.py | 12 +++++------- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/docker/test/integration/runner/compose/docker_compose_jdbc_bridge.yml b/docker/test/integration/runner/compose/docker_compose_jdbc_bridge.yml index 807916860a15..1b02e282a21d 100644 --- a/docker/test/integration/runner/compose/docker_compose_jdbc_bridge.yml +++ b/docker/test/integration/runner/compose/docker_compose_jdbc_bridge.yml @@ -2,7 +2,7 @@ version: '2.3' services: bridge1: # NOTE(vnemkov): not produced by CI/CD, so must not be replaced with altinityinfra/jdbc-bridge - image: altinityinfra/jdbc-bridge + image: clickhouse/jdbc-bridge command: | /bin/bash -c 'cat << EOF > config/datasources/self.json { diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py index 5007ac815a97..f03bcf9e1e7a 100644 --- a/tests/ci/build_check.py +++ b/tests/ci/build_check.py @@ -28,7 +28,6 @@ from tee_popen import TeePopen from version_helper import ( ClickHouseVersion, - Git, get_version_from_repo, update_version_local, ) @@ -257,7 +256,6 @@ def main(): logging.info("Got version from repo %s", version.string) - official_flag = True version._flavour = version_type = CLICKHOUSE_STABLE_VERSION_SUFFIX # TODO (vnemkov): right now we'll use simplified version management: @@ -283,7 +281,7 @@ def main(): # NOTE(vnemkov): since we still want to use CCACHE over SCCACHE, unlike upstream, # we need to create local directory for that, just as with 22.8 - ccache_path = os.path.join(CACHES_PATH, build_name + "_ccache") + ccache_path = Path(CACHES_PATH, build_name + "_ccache") logging.info("Will try to fetch cache for our build") try: @@ -295,9 +293,9 @@ def main(): logging.info("Failed to get ccache, building without it. 
Error: %s", e) rmtree(ccache_path, ignore_errors=True) - if not os.path.exists(ccache_path): + if not ccache_path.exists(): logging.info("cache was not fetched, will create empty dir") - os.makedirs(ccache_path) + ccache_path.mkdir(parents=True) packager_cmd = get_packager_cmd( build_config, @@ -380,10 +378,10 @@ def main(): print(f"::notice ::Log URL: {log_url}") # TODO(vnemkov): make use of Path instead of string concatenation - src_path = os.path.join(TEMP_PATH, "build_source.src.tar.gz") + src_path = Path(TEMP_PATH, "build_source.src.tar.gz") s3_path = s3_path_prefix + "/clickhouse-" + version.string + ".src.tar.gz" logging.info("s3_path %s", s3_path) - if os.path.exists(src_path): + if src_path.exists(): src_url = s3_helper.upload_build_file_to_s3( src_path, s3_path ) From faa2391ff7fb028c4f53a509d6a85fea7792cbc8 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Fri, 27 Oct 2023 20:06:04 +0200 Subject: [PATCH 003/111] Fixed BuilderDebAarch64 --- .github/workflows/release_branches.yml | 23 ----------------------- 1 file changed, 23 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 2fde382bfae6..5fed64f88487 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -198,28 +198,7 @@ jobs: with: clear-repository: true submodules: true - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . 
&& echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: fetch-depth: 0 # otherwise we will have no info about contributors - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - name: Build run: | sudo rm -fr "$TEMP_PATH" @@ -505,8 +484,6 @@ jobs: docker ps --quiet | xargs --no-run-if-empty docker kill ||: docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: sudo rm -fr "$TEMP_PATH" - - FunctionalStatelessTestAarch64: needs: [BuilderDebAarch64] runs-on: [self-hosted, func-tester, on-demand, type-cax41, image-arm-snapshot-docker_ipv6_arm] From 92ff59ce4beff7c372fba4fee408ee3777e255a6 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Fri, 27 Oct 2023 20:14:09 +0200 Subject: [PATCH 004/111] Fixed BuilderDebRelease --- tests/ci/build_check.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py index f03bcf9e1e7a..dba8fcbf8540 100644 --- a/tests/ci/build_check.py +++ b/tests/ci/build_check.py @@ -286,7 +286,7 @@ def main(): logging.info("Will try to fetch cache for our build") try: get_ccache_if_not_exists( - ccache_path, s3_helper, pr_info.number, TEMP_PATH, pr_info.release_pr + ccache_path, s3_helper, pr_info.number, temp_path, pr_info.release_pr ) except Exception as e: # In case there are issues with ccache, remove the path and do not fail a build @@ -337,7 +337,7 @@ def main(): # Upload the ccache first to have the least build time in case of problems logging.info("Will upload cache") - upload_ccache(ccache_path, s3_helper, pr_info.number, 
TEMP_PATH) + upload_ccache(ccache_path, s3_helper, pr_info.number, temp_path) # FIXME performance performance_urls = [] @@ -377,8 +377,7 @@ def main(): print(f"::notice ::Log URL: {log_url}") - # TODO(vnemkov): make use of Path instead of string concatenation - src_path = Path(TEMP_PATH, "build_source.src.tar.gz") + src_path = temp_path / "build_source.src.tar.gz" s3_path = s3_path_prefix + "/clickhouse-" + version.string + ".src.tar.gz" logging.info("s3_path %s", s3_path) if src_path.exists(): From 0d43a2b601a3d0af506e98c3383c19b8f48fb529 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Tue, 7 Nov 2023 14:56:06 +0100 Subject: [PATCH 005/111] Fixed merge issue --- .github/workflows/release_branches.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 9632f7707b1a..5d5ab4ed3076 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -201,9 +201,7 @@ jobs: clear-repository: true submodules: true filter: tree:0 - filter: tree:0 fetch-depth: 0 # otherwise we will have no info about contributors - filter: tree:0 - name: Build run: | sudo rm -fr "$TEMP_PATH" From f456018c5d94e09b235bea52188a4dff974a6d1e Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 29 Nov 2023 11:23:45 +0100 Subject: [PATCH 006/111] Fixed regression tests staring jobs, which was maimed during the merge --- .github/workflows/release_branches.yml | 169 ++++++++++++++++++++++++- 1 file changed, 164 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 0f37d1137267..9d8ffcf880c4 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -203,9 +203,7 @@ jobs: #################################### INSTALL PACKAGES ###################################### ############################################################################################ 
InstallPackagesTestRelease: - needs: [SignRelease] - timeout-minutes: 180 - runs-on: [self-hosted, style-checker, on-demand, type-cpx51, image-x86-app-docker-ce] + needs: [BuilderDebRelease] uses: ./.github/workflows/reusable_test.yml with: test_name: Install packages (amd64) @@ -297,6 +295,7 @@ jobs: ## Not depending on the tests above since they can fail at any given moment. needs: [BuilderDebRelease, BuilderDebAarch64] runs-on: ubuntu-latest + steps: - run: true RegressionCommonAmd64: @@ -311,12 +310,22 @@ jobs: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: - name: Checkout regression repo uses: actions/checkout@v3 - with: + with: repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=${{ matrix.SUITE }} artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -333,10 +342,12 @@ jobs: --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" --log raw.log - name: Create and upload logs + if: always() run: .github/create_and_upload_logs.sh 1 env: artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - uses: actions/upload-artifact@v3 + if: always() with: name: ${{ env.SUITE }}-amd64-artifacts path: | @@ -361,13 +372,23 @@ jobs: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ 
secrets.AWS_REPORT_REGION }} + steps: - name: Checkout regression repo uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression ref: ${{ env.REGRESSION_ARM_COMMIT }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=${{ matrix.SUITE }} artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -384,6 +405,7 @@ jobs: --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" --log raw.log - name: Create and upload logs + if: always() run: .github/create_and_upload_logs.sh 1 env: artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA @@ -413,13 +435,23 @@ jobs: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: - name: Checkout regression repo uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=ontime_benchmark STORAGE=/${{ matrix.STORAGE }} artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -444,6 +476,7 @@ jobs: --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" 
commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" --log raw.log - name: Create and upload logs + if: always() run: .github/create_and_upload_logs.sh 1 env: artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA @@ -473,14 +506,24 @@ jobs: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: - name: Checkout regression repo uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression ref: ${{ env.REGRESSION_ARM_COMMIT }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=ontime_benchmark STORAGE=/${{ matrix.STORAGE }} artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -505,6 +548,7 @@ jobs: --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" --log raw.log - name: Create and upload logs + if: always() run: .github/create_and_upload_logs.sh 1 env: artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA @@ -529,14 +573,24 @@ jobs: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: - name: Checkout regression repo uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression ref: ${{ env.REGRESSION_COMMON_COMMIT }} + - name: Set envs + run: | + cat >> 
"$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=clickhouse_keeper STORAGE=/ssl artifacts=builds + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -554,6 +608,7 @@ jobs: --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" --log raw.log - name: Create and upload logs + if: always() run: .github/create_and_upload_logs.sh 1 - uses: actions/upload-artifact@v3 if: always() @@ -576,14 +631,24 @@ jobs: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: - name: Checkout regression repo uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression ref: ${{ env.REGRESSION_COMMON_COMMIT }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=clickhouse_keeper STORAGE=/ssl artifacts=builds + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -601,6 +666,7 @@ jobs: --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" --log raw.log - 
name: Create and upload logs + if: always() run: .github/create_and_upload_logs.sh 1 - uses: actions/upload-artifact@v3 if: always() @@ -628,12 +694,22 @@ jobs: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: - name: Checkout regression repo uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=ldap/${{ matrix.SUITE }} artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -650,6 +726,7 @@ jobs: --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" --log raw.log - name: Create and upload logs + if: always() run: .github/create_and_upload_logs.sh 1 env: artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA @@ -679,13 +756,23 @@ jobs: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: - name: Checkout regression repo uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression ref: ${{ env.REGRESSION_ARM_COMMIT }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=ldap/${{ matrix.SUITE }} artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} - name: Setup run: 
.github/setup.sh - name: Get deb url @@ -702,6 +789,7 @@ jobs: --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" --log raw.log - name: Create and upload logs + if: always() run: .github/create_and_upload_logs.sh 1 env: artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA @@ -727,12 +815,22 @@ jobs: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: - name: Checkout regression repo uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=parquet artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -759,6 +857,7 @@ jobs: --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - name: Create and upload logs + if: always() run: .github/create_and_upload_logs.sh 1 env: artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA @@ -788,13 +887,23 @@ jobs: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: - name: Checkout regression repo uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=s3 STORAGE=/${{ matrix.STORAGE }} artifacts=public + EOF 
+ - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -819,6 +928,7 @@ jobs: --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - name: Create and upload logs + if: always() run: .github/create_and_upload_logs.sh 1 env: artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA @@ -848,14 +958,24 @@ jobs: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: - name: Checkout regression repo uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression ref: ${{ env.REGRESSION_ARM_COMMIT }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=s3 STORAGE=/${{ matrix.STORAGE }} artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -880,6 +1000,7 @@ jobs: --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - name: Create and upload logs + if: always() run: .github/create_and_upload_logs.sh 1 env: artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA @@ -909,13 +1030,23 @@ jobs: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: - name: Checkout regression repo uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=tiered_storage STORAGE=/${{ matrix.STORAGE }} artifacts=public + EOF + - name: Download json reports + uses: 
actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -939,6 +1070,7 @@ jobs: --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} --with-${{ matrix.STORAGE }} - name: Create and upload logs + if: always() run: .github/create_and_upload_logs.sh 1 env: artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA @@ -968,14 +1100,24 @@ jobs: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + steps: - name: Checkout regression repo uses: actions/checkout@v3 with: repository: Altinity/clickhouse-regression ref: ${{ env.REGRESSION_ARM_COMMIT }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=tiered_storage STORAGE=/${{ matrix.STORAGE }} artifacts=public + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -999,6 +1141,7 @@ jobs: --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} --with-${{ matrix.STORAGE }} - name: Create and upload logs + if: always() run: .github/create_and_upload_logs.sh 1 env: artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA @@ -1020,19 +1163,28 @@ jobs: needs: [BuilderDebRelease] runs-on: [self-hosted, on-demand, type-cpx41, image-x86-app-docker-ce] timeout-minutes: 180 + steps: + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' TEMP_PATH=${{runner.temp}}/signed + REPORTS_PATH=${{runner.temp}}/reports_dir + EOF - name: Clear repository run: | sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - name: Check out repository code uses: actions/checkout@v2 + - name: Download json reports uses: actions/download-artifact@v2 + with: + path: ${{ env.REPORTS_PATH }} - name: Sign release env: GPG_BINARY_SIGNING_KEY: ${{ secrets.GPG_BINARY_SIGNING_KEY }} GPG_BINARY_SIGNING_PASSPHRASE: 
${{ secrets.GPG_BINARY_SIGNING_PASSPHRASE }} REPORTS_PATH: ${{ env.REPORTS_PATH }} - batches: 4 + run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 sign_release.py - name: Upload signed hashes @@ -1040,6 +1192,12 @@ jobs: with: name: signed-hashes path: ${{ env.TEMP_PATH }}/*.gpg + - name: Cleanup + if: always() + run: | + docker ps --quiet | xargs --no-run-if-empty docker kill ||: + docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: + sudo rm -fr "$TEMP_PATH" ########################################################################################### ################################ FINISH CHECK ############################################# @@ -1055,6 +1213,7 @@ jobs: - FunctionalStatelessTestAarch64 - FunctionalStatefulTestRelease - FunctionalStatefulTestAarch64 + - IntegrationTestsRelease - CompatibilityCheck - RegressionCommonAmd64 - RegressionCommonAarch64 From 77f4c8df6e7f8cd2c9264c749f868f2448c3d674 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 29 Nov 2023 11:43:53 +0100 Subject: [PATCH 007/111] Fixing workflows --- .github/workflows/release_branches.yml | 27 ++++++++++++++------------ .github/workflows/reusable_build.yml | 5 +++++ .github/workflows/reusable_test.yml | 8 +++++++- 3 files changed, 27 insertions(+), 13 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 9d8ffcf880c4..8711f57bfe9a 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -97,12 +97,13 @@ jobs: CompatibilityCheck: needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker, on-demand, type-cpx41, image-x86-app-docker-ce] - timeout-minutes: 180 + # runs-on: [self-hosted, style-checker, on-demand, type-cpx41, image-x86-app-docker-ce] + # timeout-minutes: 180 uses: ./.github/workflows/reusable_test.yml with: test_name: Compatibility check X86 runner_type: style-checker + timeout_minutes: 180 run_command: | cd "$REPO_COPY/tests/ci" python3 
compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions @@ -112,16 +113,17 @@ jobs: ######################################################################################### BuilderDebRelease: needs: [DockerHubPush] - runs-on: [self-hosted, builder, on-demand, type-cpx51, image-x86-app-docker-ce] - timeout-minutes: 180 + # runs-on: [self-hosted, builder, on-demand, type-cpx51, image-x86-app-docker-ce] + # timeout-minutes: 180 uses: ./.github/workflows/reusable_build.yml with: build_name: package_release checkout_depth: 0 + timeout_minutes: 180 BuilderDebAarch64: needs: [DockerHubPush] - runs-on: [self-hosted, builder, on-demand, type-cax41, image-arm-app-docker-ce] + # runs-on: [self-hosted, builder, on-demand, type-cax41, image-arm-app-docker-ce] uses: ./.github/workflows/reusable_build.yml with: build_name: package_aarch64 @@ -166,12 +168,13 @@ jobs: needs: - BuilderDebRelease - BuilderDebAarch64 - runs-on: [self-hosted, style-checker, on-demand, type-cpx31, image-x86-app-docker-ce] - timeout-minutes: 180 + # runs-on: [self-hosted, style-checker, on-demand, type-cpx31, image-x86-app-docker-ce] + # timeout-minutes: 180 uses: ./.github/workflows/reusable_test.yml with: test_name: ClickHouse build check runner_type: style-checker + timeout_minutes: 180 additional_envs: | NEEDS_DATA< 1 if: ${{ inputs.batches > 1 }} - runs-on: [self-hosted, style-checker-aarch64] + runs-on: ubuntu-latest #TODO(vnemkov): NO need for a beefy custom runner for a simple script + #runs-on: [self-hosted, style-checker-aarch64] outputs: batches: ${{steps.batches.outputs.batches}} steps: @@ -69,6 +74,7 @@ jobs: env: GITHUB_JOB_OVERRIDDEN: ${{inputs.test_name}}${{ inputs.batches > 1 && format('-{0}',matrix.batch) || '' }} runs-on: [self-hosted, '${{inputs.runner_type}}'] + timeout-minutes: ${{inputs.timeout_minutes}} needs: [PrepareStrategy] strategy: fail-fast: false # we always wait for entire matrix From 7583c2d9174d91a5d47153cee352b460ea3f6845 
Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 30 Nov 2023 02:03:07 +0100 Subject: [PATCH 008/111] Attempt to set multiple runner tags via 'runner_type' input for 'reusable_build.yml' and 'reusable_tests.yml This is required to choose correct runner by tag in Altinity's infrastructure --- .github/workflows/release_branches.yml | 30 +++++++++----------------- .github/workflows/reusable_build.yml | 26 ++++++++++++++++++++-- .github/workflows/reusable_test.yml | 26 ++++++++++++++++++++-- 3 files changed, 58 insertions(+), 24 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 8711f57bfe9a..ecff074c1710 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -97,12 +97,10 @@ jobs: CompatibilityCheck: needs: [BuilderDebRelease] - # runs-on: [self-hosted, style-checker, on-demand, type-cpx41, image-x86-app-docker-ce] - # timeout-minutes: 180 uses: ./.github/workflows/reusable_test.yml with: test_name: Compatibility check X86 - runner_type: style-checker + runner_type: "['self-hosted', 'style-checker', 'on-demand', 'type-cpx41', 'image-x86-app-docker-ce']" timeout_minutes: 180 run_command: | cd "$REPO_COPY/tests/ci" @@ -113,21 +111,20 @@ jobs: ######################################################################################### BuilderDebRelease: needs: [DockerHubPush] - # runs-on: [self-hosted, builder, on-demand, type-cpx51, image-x86-app-docker-ce] - # timeout-minutes: 180 uses: ./.github/workflows/reusable_build.yml with: build_name: package_release checkout_depth: 0 timeout_minutes: 180 + runner_type: "['builder', 'on-demand', 'type-cpx51', 'image-x86-app-docker-ce']" BuilderDebAarch64: needs: [DockerHubPush] - # runs-on: [self-hosted, builder, on-demand, type-cax41, image-arm-app-docker-ce] uses: ./.github/workflows/reusable_build.yml with: build_name: package_aarch64 checkout_depth: 0 + runner_type: "['self-hosted', 'builder', 'on-demand', 
'type-cax41', 'image-arm-app-docker-ce']" ############################################################################################ @@ -168,12 +165,10 @@ jobs: needs: - BuilderDebRelease - BuilderDebAarch64 - # runs-on: [self-hosted, style-checker, on-demand, type-cpx31, image-x86-app-docker-ce] - # timeout-minutes: 180 uses: ./.github/workflows/reusable_test.yml with: test_name: ClickHouse build check - runner_type: style-checker + runner_type: "['style-checker', 'on-demand', 'type-cpx31', 'image-x86-app-docker-ce']" timeout_minutes: 180 additional_envs: | NEEDS_DATA<> ${GITHUB_OUTPUT} + env: + input: ${{ inputs.runner_type }} + Build: name: Build-${{inputs.build_name}} + needs: runner_labels_setup + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + env: GITHUB_JOB_OVERRIDDEN: Build-${{inputs.build_name}} - runs-on: [self-hosted, '${{inputs.runner_type}}'] timeout-minutes: ${{inputs.timeout_minutes}} steps: - name: Trust My Directory diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index 8926aba1b37c..9193e467109d 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -49,6 +49,26 @@ env: CHECK_NAME: ${{inputs.test_name}} jobs: + runner_labels_setup: + name: Compute proper runner labels for the rest of the jobs + runs-on: ubuntu-latest + outputs: + runner_labels: ${{ steps.setVariables.outputs.runner_labels }} + steps: + - id: setVariables + name: Prepare runner_labels variables for the later steps + run: | + + # Make something like a JSON array if a single value, or leave as is if something like a JSON array + input="$(if [ "${input:0:1}" == '[' ]; then echo "$input"; else echo "['${input}']"; fi)" + + # Prepend 'self-hosted' to an JSON array + input="${input/\[/\[\'self-hosted\',}" + + echo "runner_labels=$input" >> ${GITHUB_OUTPUT} + env: + input: ${{ inputs.runner_type }} + PrepareStrategy: # batches < 1 is misconfiguration, # and we need this step only 
for batches > 1 @@ -73,9 +93,11 @@ jobs: name: ${{inputs.test_name}}${{ inputs.batches > 1 && format('-{0}',matrix.batch) || '' }} env: GITHUB_JOB_OVERRIDDEN: ${{inputs.test_name}}${{ inputs.batches > 1 && format('-{0}',matrix.batch) || '' }} - runs-on: [self-hosted, '${{inputs.runner_type}}'] + + needs: [PrepareStrategy, runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{inputs.timeout_minutes}} - needs: [PrepareStrategy] strategy: fail-fast: false # we always wait for entire matrix matrix: From 2cdae52fb8235e846288eb751def347590d22c10 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 30 Nov 2023 09:51:53 +0100 Subject: [PATCH 009/111] Fixing version check in build_check.py --- .github/workflows/release_branches.yml | 2 ++ tests/ci/build_check.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index ecff074c1710..01491953f9ea 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -117,6 +117,8 @@ jobs: checkout_depth: 0 timeout_minutes: 180 runner_type: "['builder', 'on-demand', 'type-cpx51', 'image-x86-app-docker-ce']" + additional_envs: | + CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable BuilderDebAarch64: needs: [DockerHubPush] diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py index 4946c9450136..85e95d210462 100644 --- a/tests/ci/build_check.py +++ b/tests/ci/build_check.py @@ -257,7 +257,7 @@ def main(): logging.info("Got version from repo %s", version.string) official_flag = True - version._flavour = version_type = CLICKHOUSE_STABLE_VERSION_SUFFIX + # version._flavour = version_type = CLICKHOUSE_STABLE_VERSION_SUFFIX # TODO (vnemkov): right now we'll use simplified version management: # only update git hash and explicitly set stable version suffix. 
# official_flag = pr_info.number == 0 From 04e33bc65655f2511c32e86f9b76a3ed44bfac2c Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 30 Nov 2023 10:24:56 +0100 Subject: [PATCH 010/111] Simplified multi-value 'runner_type' --- .github/workflows/release_branches.yml | 20 ++++++++++---------- .github/workflows/reusable_build.yml | 11 ++++++----- .github/workflows/reusable_test.yml | 9 +++++---- 3 files changed, 21 insertions(+), 19 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 01491953f9ea..8ab3bc6dfe95 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -100,7 +100,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Compatibility check X86 - runner_type: "['self-hosted', 'style-checker', 'on-demand', 'type-cpx41', 'image-x86-app-docker-ce']" + runner_type: style-checker, on-demand, type-cpx4, image-x86-app-docker-ce timeout_minutes: 180 run_command: | cd "$REPO_COPY/tests/ci" @@ -116,7 +116,7 @@ jobs: build_name: package_release checkout_depth: 0 timeout_minutes: 180 - runner_type: "['builder', 'on-demand', 'type-cpx51', 'image-x86-app-docker-ce']" + runner_type: builder, on-demand, type-cpx51, image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -126,7 +126,7 @@ jobs: with: build_name: package_aarch64 checkout_depth: 0 - runner_type: "['self-hosted', 'builder', 'on-demand', 'type-cax41', 'image-arm-app-docker-ce']" + runner_type: builder, on-demand, type-cax41, image-arm-app-docker-ce ############################################################################################ @@ -170,7 +170,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: ClickHouse build check - runner_type: "['style-checker', 'on-demand', 'type-cpx31', 'image-x86-app-docker-ce']" + runner_type: style-checker, on-demand, type-cpx31, image-x86-app-docker-ce timeout_minutes: 180 additional_envs: | 
NEEDS_DATA<> ${GITHUB_OUTPUT} env: diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index 9193e467109d..5abbca999232 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -59,11 +59,12 @@ jobs: name: Prepare runner_labels variables for the later steps run: | - # Make something like a JSON array if a single value, or leave as is if something like a JSON array - input="$(if [ "${input:0:1}" == '[' ]; then echo "$input"; else echo "['${input}']"; fi)" + # Prepend self-hosted + input="self-hosted, ${input}" - # Prepend 'self-hosted' to an JSON array - input="${input/\[/\[\'self-hosted\',}" + # Make something like a JSON array from comma-separated list + input="['${input}']" + input="${input//\,/\'\, \'}" echo "runner_labels=$input" >> ${GITHUB_OUTPUT} env: From 0f58fa123d6e14e381c6e4f76cd4ad6303dcd1ec Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 30 Nov 2023 10:38:36 +0100 Subject: [PATCH 011/111] Fixed build_check.py --- tests/ci/build_check.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py index 85e95d210462..e0bd37181a34 100644 --- a/tests/ci/build_check.py +++ b/tests/ci/build_check.py @@ -265,8 +265,7 @@ def main(): # if "release" in pr_info.labels or "release-lts" in pr_info.labels: # version_type = CLICKHOUSE_STABLE_VERSION_SUFFIX # official_flag = True - - update_version_local(version, version_type) + # update_version_local(version, version_type) logging.info(f"Updated local files with version : {version.string} / {version.describe}") From f5b37f8f7f214f76bd0fcac8e7e4fda7f26e2f68 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 30 Nov 2023 16:08:57 +0100 Subject: [PATCH 012/111] Removing whitespace from input runner tag string. 
--- .github/workflows/reusable_build.yml | 2 ++ .github/workflows/reusable_test.yml | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index 39e8dbaaf7b9..7aa27224689f 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -44,6 +44,8 @@ jobs: # Prepend self-hosted input="self-hosted, ${input}" + # Remove all whitespace + input="$(echo ${input} | tr -d [:space:])" # Make something like a JSON array from comma-separated list input="['${input}']" input="${input//\,/\'\, \'}" diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index 5abbca999232..6ac44687cac4 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -10,7 +10,7 @@ name: Testing workflow required: true type: string runner_type: - description: the label of runner to use + description: the label of runner to use, can be a simple string or a comma-separated list required: true type: string timeout_minutes: @@ -62,6 +62,8 @@ jobs: # Prepend self-hosted input="self-hosted, ${input}" + # Remove all whitespace + input="$(echo ${input} | tr -d [:space:])" # Make something like a JSON array from comma-separated list input="['${input}']" input="${input//\,/\'\, \'}" From 4ee2017a482cbe4c32955c9619aa88ca53de291a Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 30 Nov 2023 16:12:46 +0100 Subject: [PATCH 013/111] A bit of debugging for runner tags --- .github/workflows/reusable_build.yml | 5 +++++ .github/workflows/reusable_test.yml | 5 +++++ 2 files changed, 10 insertions(+) diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index 7aa27224689f..15643059df09 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -63,6 +63,11 @@ jobs: GITHUB_JOB_OVERRIDDEN: Build-${{inputs.build_name}} timeout-minutes: 
${{inputs.timeout_minutes}} steps: + - name: Debug input runer tag names + run: | + cat < Date: Thu, 30 Nov 2023 10:05:57 -0800 Subject: [PATCH 014/111] Larger Builder Instances --- .github/workflows/release_branches.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 8ab3bc6dfe95..1708e86a85fd 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -35,7 +35,7 @@ jobs: runs-on: [self-hosted, style-checker, on-demand, type-cax41, in-fsn1, image-arm-app-docker-ce] steps: - name: Check out repository code - uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 + uses: Altinity/checkout@4c313c3667805fcab2e812c9ef8689c0bf4cbafb with: clear-repository: true - name: Images check @@ -52,7 +52,7 @@ jobs: runs-on: [self-hosted, style-checker, on-demand, type-cpx51, image-x86-app-docker-ce] steps: - name: Check out repository code - uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 + uses: Altinity/checkout@4c313c3667805fcab2e812c9ef8689c0bf4cbafb with: clear-repository: true - name: Images check @@ -70,7 +70,7 @@ jobs: runs-on: [self-hosted, style-checker, on-demand, type-cpx41, image-x86-app-docker-ce] steps: - name: Check out repository code - uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 + uses: Altinity/checkout@4c313c3667805fcab2e812c9ef8689c0bf4cbafb with: clear-repository: true fetch-depth: 0 # to find ancestor merge commits necessary for finding proper docker tags @@ -116,7 +116,7 @@ jobs: build_name: package_release checkout_depth: 0 timeout_minutes: 180 - runner_type: builder, on-demand, type-cpx51, image-x86-app-docker-ce + runner_type: builder, on-demand, type-ccx53, image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -126,7 +126,7 @@ jobs: with: build_name: package_aarch64 checkout_depth: 0 - runner_type: builder, 
on-demand, type-cax41, image-arm-app-docker-ce + runner_type: builder, on-demand, type-ccx53, image-x86-app-docker-ce ############################################################################################ @@ -140,7 +140,7 @@ jobs: timeout-minutes: 180 steps: - name: Check out repository code - uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 + uses: Altinity/checkout@4c313c3667805fcab2e812c9ef8689c0bf4cbafb with: clear-repository: true fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself @@ -191,7 +191,7 @@ jobs: timeout-minutes: 180 steps: - name: Check out repository code - uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 + uses: Altinity/checkout@4c313c3667805fcab2e812c9ef8689c0bf4cbafb with: clear-repository: true - name: Mark Commit Release Ready @@ -1227,7 +1227,7 @@ jobs: runs-on: [self-hosted, style-checker, on-demand, type-cpx31, image-x86-app-docker-ce] steps: - name: Check out repository code - uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 + uses: Altinity/checkout@4c313c3667805fcab2e812c9ef8689c0bf4cbafb with: clear-repository: true - name: Finish label From 02723aeb6ebbfdd2d8867e4f4689bf344ecf5d35 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 30 Nov 2023 17:00:26 -0800 Subject: [PATCH 015/111] Update reusable_build.yml --- .github/workflows/reusable_build.yml | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index 15643059df09..234c181c6b52 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -14,12 +14,12 @@ name: Build ClickHouse required: true type: string checkout_depth: - description: the value of the git shallow checkout + description: the value of the git shallow checkout. 
required: false type: number default: 1 runner_type: - description: the label of runner to use, can be a simple string or a comma-separated list + description: the label of runner to use, can be a simple string or a comma-separated list. default: builder type: string timeout_minutes: @@ -27,8 +27,21 @@ name: Build ClickHouse default: 120 type: number additional_envs: - description: additional ENV variables to setup the job + description: additional ENV variables to setup the job. type: string + secrets: + AWS_SECRET_ACCESS_KEY: + description: the access key to the aws param store. + required: true + AWS_SECRET_KEY_ID: + description: the access key id to the aws param store. + required: true + DOCKER_USERNAME: + description: username of the docker user. + required: true + DOCKER_PASSWORD: + description: password to the docker user. + required: true jobs: runner_labels_setup: From 20fad9918156e35d4971b202f375f7507d8d8f05 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 30 Nov 2023 17:00:44 -0800 Subject: [PATCH 016/111] Update reusable_test.yml --- .github/workflows/reusable_test.yml | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index c950f2e94bf5..8b2a6e739c1e 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -42,7 +42,19 @@ name: Testing workflow secret_envs: description: if given, it's passed to the environments required: false - + AWS_SECRET_ACCESS_KEY: + description: the access key to the aws param store. + required: true + AWS_SECRET_KEY_ID: + description: the access key id to the aws param store. + required: true + DOCKER_USERNAME: + description: username of the docker user. + required: true + DOCKER_PASSWORD: + description: password to the docker user. 
+ required: true + env: # Force the stdout and stderr streams to be unbuffered PYTHONUNBUFFERED: 1 From 8b584a9d5562b904297ce9f233d84208e98973c6 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 30 Nov 2023 17:02:20 -0800 Subject: [PATCH 017/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 1708e86a85fd..00740decaba6 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -98,6 +98,7 @@ jobs: CompatibilityCheck: needs: [BuilderDebRelease] uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Compatibility check X86 runner_type: style-checker, on-demand, type-cpx4, image-x86-app-docker-ce @@ -112,6 +113,7 @@ jobs: BuilderDebRelease: needs: [DockerHubPush] uses: ./.github/workflows/reusable_build.yml + secrets: inherit with: build_name: package_release checkout_depth: 0 @@ -123,6 +125,7 @@ jobs: BuilderDebAarch64: needs: [DockerHubPush] uses: ./.github/workflows/reusable_build.yml + secrets: inherit with: build_name: package_aarch64 checkout_depth: 0 @@ -168,6 +171,7 @@ jobs: - BuilderDebRelease - BuilderDebAarch64 uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: ClickHouse build check runner_type: style-checker, on-demand, type-cpx31, image-x86-app-docker-ce @@ -205,6 +209,7 @@ jobs: InstallPackagesTestRelease: needs: [BuilderDebRelease] uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Install packages (amd64) runner_type: style-checker, on-demand, type-cpx51, image-x86-app-docker-ce @@ -219,6 +224,7 @@ jobs: FunctionalStatelessTestRelease: needs: [BuilderDebRelease] uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stateless tests (release) runner_type: func-tester, on-demand, type-cpx51, 
image-x86-snapshot-docker_ipv6_x86 @@ -231,6 +237,7 @@ jobs: FunctionalStatelessTestAarch64: needs: [BuilderDebAarch64] uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stateless tests (aarch64) runner_type: func-tester, on-demand, type-cax41, image-arm-snapshot-docker_ipv6_arm @@ -247,6 +254,7 @@ jobs: FunctionalStatefulTestRelease: needs: [BuilderDebRelease] uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stateful tests (release) runner_type: func-tester, on-demand, type-cpx51, image-x86-snapshot-docker_ipv6_x86 @@ -259,6 +267,7 @@ jobs: FunctionalStatefulTestAarch64: needs: [BuilderDebAarch64] uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stateful tests (aarch64) runner_type: func-tester, on-demand, type-cax41, image-arm-snapshot-docker_ipv6_arm @@ -275,6 +284,7 @@ jobs: IntegrationTestsRelease: needs: [BuilderDebRelease] uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Integration tests (release) runner_type: stress-tester, func-tester From 33b5ac1d958d6342a55919fbd9e3b88667c05d2d Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 30 Nov 2023 18:28:29 -0800 Subject: [PATCH 018/111] Update reusable_build.yml --- .github/workflows/reusable_build.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index 234c181c6b52..6b97f045a04b 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -33,9 +33,12 @@ name: Build ClickHouse AWS_SECRET_ACCESS_KEY: description: the access key to the aws param store. required: true - AWS_SECRET_KEY_ID: + AWS_ACCESS_KEY_ID: description: the access key id to the aws param store. required: true + AWS_DEFAULT_REGION: + description: the region of the aws param store. 
+ required: true DOCKER_USERNAME: description: username of the docker user. required: true From 3afdbac2b87ae6a277a2b9ab67191647edab7dd8 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 30 Nov 2023 18:28:44 -0800 Subject: [PATCH 019/111] Update reusable_test.yml --- .github/workflows/reusable_test.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index 8b2a6e739c1e..6a26a82d2c3c 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -45,9 +45,12 @@ name: Testing workflow AWS_SECRET_ACCESS_KEY: description: the access key to the aws param store. required: true - AWS_SECRET_KEY_ID: + AWS_ACCESS_KEY_ID: description: the access key id to the aws param store. required: true + AWS_DEFAULT_REGION: + description: the region of the aws param store. + required: true DOCKER_USERNAME: description: username of the docker user. 
required: true From 4aef2ebed9bce0f897c7c99f588de27ccb8ad60c Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 30 Nov 2023 18:38:00 -0800 Subject: [PATCH 020/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 00740decaba6..3e9a14e0e1d1 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -35,7 +35,7 @@ jobs: runs-on: [self-hosted, style-checker, on-demand, type-cax41, in-fsn1, image-arm-app-docker-ce] steps: - name: Check out repository code - uses: Altinity/checkout@4c313c3667805fcab2e812c9ef8689c0bf4cbafb + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Images check @@ -52,7 +52,7 @@ jobs: runs-on: [self-hosted, style-checker, on-demand, type-cpx51, image-x86-app-docker-ce] steps: - name: Check out repository code - uses: Altinity/checkout@4c313c3667805fcab2e812c9ef8689c0bf4cbafb + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Images check @@ -70,7 +70,7 @@ jobs: runs-on: [self-hosted, style-checker, on-demand, type-cpx41, image-x86-app-docker-ce] steps: - name: Check out repository code - uses: Altinity/checkout@4c313c3667805fcab2e812c9ef8689c0bf4cbafb + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true fetch-depth: 0 # to find ancestor merge commits necessary for finding proper docker tags @@ -143,7 +143,7 @@ jobs: timeout-minutes: 180 steps: - name: Check out repository code - uses: Altinity/checkout@4c313c3667805fcab2e812c9ef8689c0bf4cbafb + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself @@ -195,7 +195,7 @@ 
jobs: timeout-minutes: 180 steps: - name: Check out repository code - uses: Altinity/checkout@4c313c3667805fcab2e812c9ef8689c0bf4cbafb + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Mark Commit Release Ready @@ -1237,7 +1237,7 @@ jobs: runs-on: [self-hosted, style-checker, on-demand, type-cpx31, image-x86-app-docker-ce] steps: - name: Check out repository code - uses: Altinity/checkout@4c313c3667805fcab2e812c9ef8689c0bf4cbafb + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Finish label From 90e1ed519a85e0907427a170b18c06b57b0fb80f Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 30 Nov 2023 23:18:42 -0800 Subject: [PATCH 021/111] Update reusable_build.yml --- .github/workflows/reusable_build.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index 6b97f045a04b..b7c31740a273 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -46,6 +46,13 @@ name: Build ClickHouse description: password to the docker user. 
required: true +env: + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} + jobs: runner_labels_setup: name: Compute proper runner labels for the rest of the jobs From 862bd2358830bea0df14f78414f034a38101eb99 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 30 Nov 2023 23:18:59 -0800 Subject: [PATCH 022/111] Update reusable_test.yml --- .github/workflows/reusable_test.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index 6a26a82d2c3c..f8b0552b5cbf 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -62,6 +62,11 @@ env: # Force the stdout and stderr streams to be unbuffered PYTHONUNBUFFERED: 1 CHECK_NAME: ${{inputs.test_name}} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} jobs: runner_labels_setup: From e9fe8fcc25f19becb5b8b59fa40e0f4b5b2c6581 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 30 Nov 2023 23:20:11 -0800 Subject: [PATCH 023/111] Update reusable_build.yml --- .github/workflows/reusable_build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index b7c31740a273..b024ade46246 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -94,7 +94,7 @@ jobs: - name: Trust My Directory run: git config --global --add safe.directory * # https://stackoverflow.com/a/71940133 - name: Check 
out repository code - uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true submodules: true From 282c310071e9eda6d25ab1ff6a4a80104be15b85 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 30 Nov 2023 23:20:29 -0800 Subject: [PATCH 024/111] Update reusable_test.yml --- .github/workflows/reusable_test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index f8b0552b5cbf..383c78f6487d 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -135,7 +135,7 @@ jobs: inputs: ${{toJson(inputs) }} JSON - name: Check out repository code - uses: Altinity/checkout@34c90ad008fe1469182f3637d3de4766aa10a5c7 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true submodules: ${{inputs.submodules}} From aa1be57a53dc28a90aba0ea2d64ae0ecbfcae822 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 30 Nov 2023 23:22:05 -0800 Subject: [PATCH 025/111] Update reusable_build.yml --- .github/workflows/reusable_build.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index b024ade46246..2fb5bf5739dd 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -1,10 +1,6 @@ ### For the pure soul wishes to move it to another place # https://github.com/orgs/community/discussions/9050 -env: - # Force the stdout and stderr streams to be unbuffered - PYTHONUNBUFFERED: 1 - name: Build ClickHouse 'on': workflow_call: @@ -47,6 +43,8 @@ name: Build ClickHouse required: true env: + # Force the stdout and stderr streams to be unbuffered + PYTHONUNBUFFERED: 1 AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} From c3bbdf873d9bbcbdf047174d74b5feb20e04fba1 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Fri, 1 Dec 2023 11:55:48 -0800 Subject: [PATCH 026/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 3e9a14e0e1d1..df47aadf885d 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -101,7 +101,7 @@ jobs: secrets: inherit with: test_name: Compatibility check X86 - runner_type: style-checker, on-demand, type-cpx4, image-x86-app-docker-ce + runner_type: style-checker, on-demand, type-cpx41, image-x86-app-docker-ce timeout_minutes: 180 run_command: | cd "$REPO_COPY/tests/ci" From 7af6e9ddefeb9b78f1400c204181eda9ccb13943 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 7 Dec 2023 22:36:02 +0100 Subject: [PATCH 027/111] Using docker registry for pulling all test images Set `dockerhub-proxy.dockerhub-proxy-zone:5000` as a caching pull-through docker registry for all tests actions. 
--- .github/workflows/reusable_test.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index 383c78f6487d..24c4f773b484 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -155,16 +155,18 @@ jobs: uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - - name: Docker IPv6 configuration + - name: Docker configuration shell: bash run: | - # make sure docker uses proper IPv6 config + # make sure docker uses proper IPv6 config and registry for caching sudo touch /etc/docker/daemon.json sudo chown ubuntu:ubuntu /etc/docker/daemon.json sudo cat < /etc/docker/daemon.json { "ipv6": true, "fixed-cidr-v6": "2001:3984:3989::/64" + "insecure-registries" : ["dockerhub-proxy.dockerhub-proxy-zone:5000"], + "registry-mirrors" : ["http://dockerhub-proxy.dockerhub-proxy-zone:5000"] } EOT sudo chown root:root /etc/docker/daemon.json From 885f0a77606355dd2d8ba0938bb1649f36f6ddf8 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 7 Dec 2023 22:37:31 +0100 Subject: [PATCH 028/111] Update reusable_test.yml --- .github/workflows/reusable_test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index 24c4f773b484..39ac5200aabe 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -164,7 +164,7 @@ jobs: sudo cat < /etc/docker/daemon.json { "ipv6": true, - "fixed-cidr-v6": "2001:3984:3989::/64" + "fixed-cidr-v6": "2001:3984:3989::/64", "insecure-registries" : ["dockerhub-proxy.dockerhub-proxy-zone:5000"], "registry-mirrors" : ["http://dockerhub-proxy.dockerhub-proxy-zone:5000"] } From faa980809d8e93c9d6744ad91b70aa5078028727 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Sun, 10 Dec 2023 22:01:06 +0100 Subject: [PATCH 029/111] Common docker setup action, to use caching docker registry everywhere 
--- .github/actions/docker_setup/action.yml | 32 +++++++++++++++++++++++++ .github/workflows/release_branches.yml | 6 +++++ .github/workflows/reusable_build.yml | 2 ++ .github/workflows/reusable_test.yml | 20 +++------------- 4 files changed, 43 insertions(+), 17 deletions(-) create mode 100644 .github/actions/docker_setup/action.yml diff --git a/.github/actions/docker_setup/action.yml b/.github/actions/docker_setup/action.yml new file mode 100644 index 000000000000..56ec56026879 --- /dev/null +++ b/.github/actions/docker_setup/action.yml @@ -0,0 +1,32 @@ +name: Docker setup +description: Setup docker +inputs: + nested_job: + description: the fuse for unintended use inside of the reusable callable jobs + default: true + type: boolean +runs: + using: "composite" + steps: + - name: Docker IPv6 configuration + shell: bash + run: | + # make sure docker uses proper IPv6 config + sudo touch /etc/docker/daemon.json + sudo chown ubuntu:ubuntu /etc/docker/daemon.json + sudo cat < /etc/docker/daemon.json + { + "ipv6": true, + "fixed-cidr-v6": "2001:3984:3989::/64", + "insecure-registries" : ["dockerhub-proxy.dockerhub-proxy-zone:5000", "65.108.242.32:5000"], + "registry-mirrors" : ["http://dockerhub-proxy.dockerhub-proxy-zone:5000", "http://65.108.242.32:5000"] + } + EOT + sudo chown root:root /etc/docker/daemon.json + sudo systemctl restart docker + sudo systemctl info docker + # Print info about registry + docker info + # Check if remote docker proxy is accessible + ping -c 10 65.108.242.32 + ping -c 10 dockerhub-proxy.dockerhub-proxy-zone diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index df47aadf885d..cc04d8d1e8e8 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -38,6 +38,8 @@ jobs: uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true + - name: Common docker setup + uses: ./.github/actions/docker_setup - name: Images check run: | cd 
"$GITHUB_WORKSPACE/tests/ci" @@ -55,6 +57,8 @@ jobs: uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true + - name: Common docker setup + uses: ./.github/actions/docker_setup - name: Images check run: | cd "$GITHUB_WORKSPACE/tests/ci" @@ -75,6 +79,8 @@ jobs: clear-repository: true fetch-depth: 0 # to find ancestor merge commits necessary for finding proper docker tags filter: tree:0 + - name: Common docker setup + uses: ./.github/actions/docker_setup - name: Download changed aarch64 images uses: actions/download-artifact@v3 with: diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index 2fb5bf5739dd..f8c223420e56 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -118,6 +118,8 @@ jobs: uses: ./.github/actions/common_setup with: job_type: build_check + - name: Common docker setup + uses: ./.github/actions/docker_setup - name: Download changed images uses: actions/download-artifact@v3 with: diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index 39ac5200aabe..07cd4bda2015 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -57,7 +57,7 @@ name: Testing workflow DOCKER_PASSWORD: description: password to the docker user. 
required: true - + env: # Force the stdout and stderr streams to be unbuffered PYTHONUNBUFFERED: 1 @@ -155,22 +155,8 @@ jobs: uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - - name: Docker configuration - shell: bash - run: | - # make sure docker uses proper IPv6 config and registry for caching - sudo touch /etc/docker/daemon.json - sudo chown ubuntu:ubuntu /etc/docker/daemon.json - sudo cat < /etc/docker/daemon.json - { - "ipv6": true, - "fixed-cidr-v6": "2001:3984:3989::/64", - "insecure-registries" : ["dockerhub-proxy.dockerhub-proxy-zone:5000"], - "registry-mirrors" : ["http://dockerhub-proxy.dockerhub-proxy-zone:5000"] - } - EOT - sudo chown root:root /etc/docker/daemon.json - sudo systemctl restart docker + - name: Docker setup + uses: ./.github/actions/docker_setup - name: Setup batch if: ${{ inputs.batches > 1}} run: | From 98a608649b00084afed54a6c0ad2b3a8f6eda6d4 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Sun, 10 Dec 2023 22:10:16 +0100 Subject: [PATCH 030/111] Update action.yml --- .github/actions/docker_setup/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/docker_setup/action.yml b/.github/actions/docker_setup/action.yml index 56ec56026879..ce52084988c6 100644 --- a/.github/actions/docker_setup/action.yml +++ b/.github/actions/docker_setup/action.yml @@ -26,7 +26,7 @@ runs: sudo systemctl restart docker sudo systemctl info docker # Print info about registry - docker info + docker --version # Check if remote docker proxy is accessible ping -c 10 65.108.242.32 ping -c 10 dockerhub-proxy.dockerhub-proxy-zone From f8a68115da0a61c9c173384fd560a60eb8cf9e60 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Sun, 10 Dec 2023 22:15:28 +0100 Subject: [PATCH 031/111] Update action.yml --- .github/actions/docker_setup/action.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/actions/docker_setup/action.yml b/.github/actions/docker_setup/action.yml index 
ce52084988c6..713d809b6718 100644 --- a/.github/actions/docker_setup/action.yml +++ b/.github/actions/docker_setup/action.yml @@ -24,9 +24,9 @@ runs: EOT sudo chown root:root /etc/docker/daemon.json sudo systemctl restart docker - sudo systemctl info docker + sudo systemctl status docker # Print info about registry - docker --version + docker info # Check if remote docker proxy is accessible ping -c 10 65.108.242.32 ping -c 10 dockerhub-proxy.dockerhub-proxy-zone From ddb289fb0d0de86d2cb18769ef0a29c15937c819 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Sun, 10 Dec 2023 23:32:29 +0100 Subject: [PATCH 032/111] Update action.yml --- .github/actions/docker_setup/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/docker_setup/action.yml b/.github/actions/docker_setup/action.yml index 713d809b6718..5aa9f7d0d087 100644 --- a/.github/actions/docker_setup/action.yml +++ b/.github/actions/docker_setup/action.yml @@ -29,4 +29,4 @@ runs: docker info # Check if remote docker proxy is accessible ping -c 10 65.108.242.32 - ping -c 10 dockerhub-proxy.dockerhub-proxy-zone + # ping -c 10 dockerhub-proxy.dockerhub-proxy-zone From 0ca958d2e2df5b34b74103c32c9d6767fcad0f5b Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Mon, 11 Dec 2023 00:32:18 +0100 Subject: [PATCH 033/111] No docker proxy for DockerHubPush-X jobs --- .github/workflows/release_branches.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index cc04d8d1e8e8..4684b638ef29 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -38,8 +38,8 @@ jobs: uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - - name: Common docker setup - uses: ./.github/actions/docker_setup +# - name: Common docker setup +# uses: ./.github/actions/docker_setup - name: Images check run: | cd 
"$GITHUB_WORKSPACE/tests/ci" @@ -57,8 +57,8 @@ jobs: uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - - name: Common docker setup - uses: ./.github/actions/docker_setup +# - name: Common docker setup +# uses: ./.github/actions/docker_setup - name: Images check run: | cd "$GITHUB_WORKSPACE/tests/ci" From 503ba506602ef97133a19e09aaba9dc703982be3 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Mon, 11 Dec 2023 13:12:31 +0100 Subject: [PATCH 034/111] Attempt to docker login from docker_setup/action.yml Passing secrets from surrounding context, lets see how it works --- .github/actions/docker_setup/action.yml | 10 ++++++++++ .github/workflows/release_branches.yml | 6 ++++++ .github/workflows/reusable_build.yml | 11 +++++++++++ .github/workflows/reusable_test.yml | 12 ++++++++++-- 4 files changed, 37 insertions(+), 2 deletions(-) diff --git a/.github/actions/docker_setup/action.yml b/.github/actions/docker_setup/action.yml index 5aa9f7d0d087..83bd8a8ea374 100644 --- a/.github/actions/docker_setup/action.yml +++ b/.github/actions/docker_setup/action.yml @@ -5,6 +5,13 @@ inputs: description: the fuse for unintended use inside of the reusable callable jobs default: true type: boolean +secrets: + DOCKER_USERNAME: + description: username for the dockerhub login + required: true + DOCKER_PASSWORD: + description: password for the dockerhub login + required: true runs: using: "composite" steps: @@ -30,3 +37,6 @@ runs: # Check if remote docker proxy is accessible ping -c 10 65.108.242.32 # ping -c 10 dockerhub-proxy.dockerhub-proxy-zone + - name: Docker login + shell: bash + run: docker login -u ${{ secrets.DOCKER_USERNAME }} -p ${{ secrets.DOCKER_PASSWORD }} diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 4684b638ef29..f377c56b3ca1 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -79,22 +79,28 @@ jobs: clear-repository: true 
fetch-depth: 0 # to find ancestor merge commits necessary for finding proper docker tags filter: tree:0 + - name: Common docker setup uses: ./.github/actions/docker_setup + secrets: inherit + - name: Download changed aarch64 images uses: actions/download-artifact@v3 with: name: changed_images_aarch64 path: ${{ runner.temp }} + - name: Download changed amd64 images uses: actions/download-artifact@v3 with: name: changed_images_amd64 path: ${{ runner.temp }} + - name: Images check run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64 + - name: Upload images files to artifacts uses: actions/upload-artifact@v3 with: diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index f8c223420e56..546b113167a7 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -89,8 +89,10 @@ jobs: cat <> "$GITHUB_ENV" << 'EOF' ${{inputs.additional_envs}} EOF python3 "$GITHUB_WORKSPACE"/tests/ci/ci_config.py --build-name "${{inputs.build_name}}" >> "$GITHUB_ENV" + - name: Apply sparse checkout for contrib # in order to check that it doesn't break build # This step is done in GITHUB_WORKSPACE, # because it's broken in REPO_COPY for some reason @@ -114,26 +118,33 @@ jobs: "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' du -hs "$GITHUB_WORKSPACE/contrib" ||: find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: + - name: Common setup uses: ./.github/actions/common_setup with: job_type: build_check + - name: Common docker setup uses: ./.github/actions/docker_setup + secrets: inherit + - name: Download changed images uses: actions/download-artifact@v3 with: name: changed_images path: ${{ env.IMAGES_PATH }} + - name: Build run: | cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" + - name: Upload build URLs to artifacts if: ${{ success() || failure() }} uses: actions/upload-artifact@v3 with: name: ${{ env.BUILD_URLS }} path: ${{ env.TEMP_PATH 
}}/${{ env.BUILD_URLS }}.json + - name: Clean if: always() uses: ./.github/actions/clean diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index 07cd4bda2015..4cbfad075712 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -85,8 +85,7 @@ jobs: # Remove all whitespace input="$(echo ${input} | tr -d [:space:])" # Make something like a JSON array from comma-separated list - input="['${input}']" - input="${input//\,/\'\, \'}" + input="[ '${input//\,/\'\, \'}' ]" echo "runner_labels=$input" >> ${GITHUB_OUTPUT} env: @@ -134,6 +133,7 @@ jobs: cat <> "$GITHUB_ENV" << 'EOF' ${{inputs.additional_envs}} ${{secrets.secret_envs}} EOF + - name: Common setup uses: ./.github/actions/common_setup with: job_type: test + - name: Download json reports uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} + - name: Docker setup uses: ./.github/actions/docker_setup + secrets: inherit + - name: Setup batch if: ${{ inputs.batches > 1}} run: | @@ -164,8 +170,10 @@ jobs: RUN_BY_HASH_NUM=${{matrix.batch}} RUN_BY_HASH_TOTAL=${{inputs.batches}} EOF + - name: Run test run: ${{inputs.run_command}} + - name: Clean if: always() uses: ./.github/actions/clean From 2db2b2971a45d6f5f64c3dd13faf7c0415e88ac8 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Mon, 11 Dec 2023 13:14:17 +0100 Subject: [PATCH 035/111] Attempt to do log in sooner --- .github/workflows/release_branches.yml | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index f377c56b3ca1..46e1b5409a80 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -38,8 +38,11 @@ jobs: uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true -# - name: Common docker setup -# uses: ./.github/actions/docker_setup + + - name: Common docker setup + uses: 
./.github/actions/docker_setup + secrets: inherit + - name: Images check run: | cd "$GITHUB_WORKSPACE/tests/ci" @@ -57,8 +60,11 @@ jobs: uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true -# - name: Common docker setup -# uses: ./.github/actions/docker_setup + + - name: Common docker setup + uses: ./.github/actions/docker_setup + secrets: inherit + - name: Images check run: | cd "$GITHUB_WORKSPACE/tests/ci" From ba0de7fb4d39a5d1fb14e38fa5a5be1d6a8e4c06 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Mon, 11 Dec 2023 13:18:33 +0100 Subject: [PATCH 036/111] A bit more logging --- .github/actions/docker_setup/action.yml | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/.github/actions/docker_setup/action.yml b/.github/actions/docker_setup/action.yml index 83bd8a8ea374..3c3ef3070fc7 100644 --- a/.github/actions/docker_setup/action.yml +++ b/.github/actions/docker_setup/action.yml @@ -34,9 +34,8 @@ runs: sudo systemctl status docker # Print info about registry docker info - # Check if remote docker proxy is accessible - ping -c 10 65.108.242.32 - # ping -c 10 dockerhub-proxy.dockerhub-proxy-zone - name: Docker login shell: bash - run: docker login -u ${{ secrets.DOCKER_USERNAME }} -p ${{ secrets.DOCKER_PASSWORD }} + run: | + docker login -u ${{ secrets.DOCKER_USERNAME }} -p ${{ secrets.DOCKER_PASSWORD }} + docker info From 94f8a74fb186f4d5565e444cc9ee538458c86b6a Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Mon, 11 Dec 2023 13:30:45 +0100 Subject: [PATCH 037/111] Alternative way to pass secrets --- .github/actions/docker_setup/action.yml | 5 +++-- .github/workflows/release_branches.yml | 12 +++++++++--- .github/workflows/reusable_build.yml | 4 +++- .github/workflows/reusable_test.yml | 4 +++- 4 files changed, 18 insertions(+), 7 deletions(-) diff --git a/.github/actions/docker_setup/action.yml b/.github/actions/docker_setup/action.yml index 3c3ef3070fc7..ed5afe4ee1bd 100644 --- 
a/.github/actions/docker_setup/action.yml +++ b/.github/actions/docker_setup/action.yml @@ -5,13 +5,14 @@ inputs: description: the fuse for unintended use inside of the reusable callable jobs default: true type: boolean -secrets: DOCKER_USERNAME: description: username for the dockerhub login required: true + type: string DOCKER_PASSWORD: description: password for the dockerhub login required: true + type: string runs: using: "composite" steps: @@ -37,5 +38,5 @@ runs: - name: Docker login shell: bash run: | - docker login -u ${{ secrets.DOCKER_USERNAME }} -p ${{ secrets.DOCKER_PASSWORD }} + docker login -u ${{ inputs.DOCKER_USERNAME }} -p ${{ inputs.DOCKER_PASSWORD }} docker info diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 46e1b5409a80..e90c935eb93b 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -41,7 +41,9 @@ jobs: - name: Common docker setup uses: ./.github/actions/docker_setup - secrets: inherit + with: + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} - name: Images check run: | @@ -63,7 +65,9 @@ jobs: - name: Common docker setup uses: ./.github/actions/docker_setup - secrets: inherit + with: + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} - name: Images check run: | @@ -88,7 +92,9 @@ jobs: - name: Common docker setup uses: ./.github/actions/docker_setup - secrets: inherit + with: + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} - name: Download changed aarch64 images uses: actions/download-artifact@v3 diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index 546b113167a7..4204798f9861 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -126,7 +126,9 @@ jobs: - name: Common docker setup uses: ./.github/actions/docker_setup - 
secrets: inherit + with: + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} - name: Download changed images uses: actions/download-artifact@v3 diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index 4cbfad075712..c20f32bc9f88 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -161,7 +161,9 @@ jobs: - name: Docker setup uses: ./.github/actions/docker_setup - secrets: inherit + with: + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} - name: Setup batch if: ${{ inputs.batches > 1}} From 6c5828a4d77991e319b5b9143e9d35d46cc16e69 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Mon, 11 Dec 2023 16:17:14 +0100 Subject: [PATCH 038/111] Not using registry + a bit more logging for when we are building docker images --- .github/actions/docker_setup/action.yml | 6 +----- tests/ci/docker_images_check.py | 6 ++++++ 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/.github/actions/docker_setup/action.yml b/.github/actions/docker_setup/action.yml index ed5afe4ee1bd..2748ce1aecdb 100644 --- a/.github/actions/docker_setup/action.yml +++ b/.github/actions/docker_setup/action.yml @@ -25,16 +25,12 @@ runs: sudo cat < /etc/docker/daemon.json { "ipv6": true, - "fixed-cidr-v6": "2001:3984:3989::/64", - "insecure-registries" : ["dockerhub-proxy.dockerhub-proxy-zone:5000", "65.108.242.32:5000"], - "registry-mirrors" : ["http://dockerhub-proxy.dockerhub-proxy-zone:5000", "http://65.108.242.32:5000"] + "fixed-cidr-v6": "2001:3984:3989::/64" } EOT sudo chown root:root /etc/docker/daemon.json sudo systemctl restart docker sudo systemctl status docker - # Print info about registry - docker info - name: Docker login shell: bash run: | diff --git a/tests/ci/docker_images_check.py b/tests/ci/docker_images_check.py index de33a9525dd0..c923d0e7c980 100644 --- a/tests/ci/docker_images_check.py +++ 
b/tests/ci/docker_images_check.py @@ -397,6 +397,7 @@ def main(): changed_json = TEMP_PATH / "changed_images.json" if args.push: + logging.info('Doing docker login') subprocess.check_output( # pylint: disable=unexpected-keyword-arg "docker login --username 'altinityinfra' --password-stdin", input=get_parameter_from_ssm("dockerhub-password"), @@ -404,6 +405,11 @@ def main(): shell=True, ) + loggin.info('Docker info: %s, ', subprocess.check_output( # pylint: disable=unexpected-keyword-arg + "docker info", + encoding="utf-8", + shell=True, + )) images_dict = get_images_dict(Path(REPO_COPY), IMAGES_FILE_PATH) pr_info = PRInfo() From 88d7ea8aec75f545d11625ffc7df73423cefc369 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Mon, 11 Dec 2023 16:31:58 +0100 Subject: [PATCH 039/111] Fixed typo + more logging --- tests/ci/docker_images_check.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/tests/ci/docker_images_check.py b/tests/ci/docker_images_check.py index c923d0e7c980..abcfd1379b3a 100644 --- a/tests/ci/docker_images_check.py +++ b/tests/ci/docker_images_check.py @@ -397,6 +397,12 @@ def main(): changed_json = TEMP_PATH / "changed_images.json" if args.push: + logging.info('Docker info BEFORE logging in: %s, ', subprocess.check_output( # pylint: disable=unexpected-keyword-arg + "docker info", + encoding="utf-8", + shell=True, + )) + logging.info('Doing docker login') subprocess.check_output( # pylint: disable=unexpected-keyword-arg "docker login --username 'altinityinfra' --password-stdin", @@ -405,7 +411,7 @@ def main(): shell=True, ) - loggin.info('Docker info: %s, ', subprocess.check_output( # pylint: disable=unexpected-keyword-arg + logging.info('Docker info: %s, ', subprocess.check_output( # pylint: disable=unexpected-keyword-arg "docker info", encoding="utf-8", shell=True, From 94f4b13550bef66965dcc2c2018e95392ff5b9b0 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Mon, 11 Dec 2023 20:26:31 +0100 Subject: [PATCH 040/111] Attempt to 
fix integration tests Using different CIDR for IPv6 in docker Using docker registry for caching --- docker/test/integration/runner/dockerd-entrypoint.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/test/integration/runner/dockerd-entrypoint.sh b/docker/test/integration/runner/dockerd-entrypoint.sh index f4fb84acbe18..0f9c1fa8ed9b 100755 --- a/docker/test/integration/runner/dockerd-entrypoint.sh +++ b/docker/test/integration/runner/dockerd-entrypoint.sh @@ -4,12 +4,12 @@ set -e mkdir -p /etc/docker/ echo '{ "ipv6": true, - "fixed-cidr-v6": "fd00::/8", + "fixed-cidr-v6": "2001:db8:1::/64", "ip-forward": true, "log-level": "debug", "storage-driver": "overlay2", - "insecure-registries" : ["dockerhub-proxy.dockerhub-proxy-zone:5000"], - "registry-mirrors" : ["http://dockerhub-proxy.dockerhub-proxy-zone:5000"] + "insecure-registries" : ["65.108.242.32:5000"], + "registry-mirrors" : ["http://65.108.242.32:5000"] }' | dd of=/etc/docker/daemon.json 2>/dev/null if [ -f /sys/fs/cgroup/cgroup.controllers ]; then From f9c75b3becd8cb060008d34519136f230378beb2 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Tue, 12 Dec 2023 15:07:24 +0100 Subject: [PATCH 041/111] Minor fixes and a bit more logging --- tests/ci/clickhouse_helper.py | 26 +++++++++++++------------- tests/integration/helpers/cluster.py | 6 ++++++ 2 files changed, 19 insertions(+), 13 deletions(-) diff --git a/tests/ci/clickhouse_helper.py b/tests/ci/clickhouse_helper.py index 1e1c50c892be..90a40fbaf737 100644 --- a/tests/ci/clickhouse_helper.py +++ b/tests/ci/clickhouse_helper.py @@ -170,19 +170,19 @@ def select_json_each_row(self, db, query, query_params=None): # Obtain the machine type from IMDS: def get_instance_type(): - url = "http://169.254.169.254/latest/meta-data/instance-type" - for i in range(5): - try: - response = requests.get(url, timeout=1) - if response.status_code == 200: - return response.text - except Exception as e: - error = ( - f"Received exception while 
sending data to {url} on {i} attempt: {e}" - ) - logging.warning(error) - continue - return "" + # url = "http://169.254.169.254/latest/meta-data/instance-type" + # for i in range(5): + # try: + # response = requests.get(url, timeout=1) + # if response.status_code == 200: + # return response.text + # except Exception as e: + # error = ( + # f"Received exception while sending data to {url} on {i} attempt: {e}" + # ) + # logging.warning(error) + # continue + return "Altinity runner" def prepare_tests_results_for_clickhouse( diff --git a/tests/integration/helpers/cluster.py b/tests/integration/helpers/cluster.py index 1a98a366a7a0..4282f1895ac2 100644 --- a/tests/integration/helpers/cluster.py +++ b/tests/integration/helpers/cluster.py @@ -719,6 +719,12 @@ def redis_port(self): return self._redis_port def print_all_docker_pieces(self): + logging.debug("!!! Docker info: %s", subprocess.check_output( + "docker info", + shell=True, + universal_newlines=True, + )) + res_networks = subprocess.check_output( f"docker network ls --filter name='{self.project_name}*'", shell=True, From cbc325151d759921b5fd7d8bb9ab45037985c2da Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Tue, 12 Dec 2023 17:53:56 +0100 Subject: [PATCH 042/111] Specified versions of installed software based on upstream's 332-02d291a41a22d29b47e5d1f2e3058cfb66042ce1 images --- docker/test/integration/base/Dockerfile | 48 ++++---- .../integration/helper_container/Dockerfile | 2 +- docker/test/integration/runner/Dockerfile | 112 +++++++++--------- 3 files changed, 81 insertions(+), 81 deletions(-) diff --git a/docker/test/integration/base/Dockerfile b/docker/test/integration/base/Dockerfile index caf0f38bec4e..82dfc51162f0 100644 --- a/docker/test/integration/base/Dockerfile +++ b/docker/test/integration/base/Dockerfile @@ -7,33 +7,33 @@ SHELL ["/bin/bash", "-c"] RUN apt-get update \ && env DEBIAN_FRONTEND=noninteractive apt-get -y install \ - bsdutils \ - curl \ - default-jre \ - g++ \ - gdb \ - iproute2 \ - 
krb5-user \ - libicu-dev \ - libsqlite3-dev \ - libsqliteodbc \ - lsof \ - lz4 \ - odbc-postgresql \ - odbcinst \ - python3 \ - rpm2cpio \ - sqlite3 \ - tar \ - tzdata \ - unixodbc \ - python3-pip \ - libcurl4-openssl-dev \ - libssl-dev \ + bsdutils=1:2.37.2-4ubuntu3 \ + curl=7.81.0-1ubuntu1.14 \ + default-jre=2:1.11-72build2 \ + g++=4:11.2.0-1ubuntu1 \ + gdb=12.1-0ubuntu1~22.04 \ + iproute2=5.15.0-1ubuntu2 \ + krb5-user=1.19.2-2ubuntu0.3 \ + libicu-dev=70.1-2 \ + libsqlite3-dev=3.37.2-2ubuntu0.1 \ + libsqliteodbc=0.9998-3 \ + lsof=4.93.2+dfsg-1.1build2 \ + lz4=1.9.3-2build2 \ + odbc-postgresql=1:13.02.0000-2 \ + odbcinst=2.3.9-5 \ + python3=3.10.6-1~22.04 \ + rpm2cpio=4.17.0+dfsg1-4build1 \ + sqlite3=3.37.2-2ubuntu0.1 \ + tar=1.34+dfsg-1ubuntu0.1.22.04.1 \ + tzdata=2023c-0ubuntu0.22.04.2 \ + unixodbc=2.3.9-5 \ + python3-pip=22.0.2+dfsg-1ubuntu0.4 \ + libcurl4-openssl-dev=7.81.0-1ubuntu1.14 \ + libssl-dev=3.0.2-0ubuntu1.12 \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/* -RUN pip3 install pycurl +RUN pip3 install pycurl=7.45.2 # Architecture of the image when BuildKit/buildx is used ARG TARGETARCH diff --git a/docker/test/integration/helper_container/Dockerfile b/docker/test/integration/helper_container/Dockerfile index 60adaea17961..eb2d6e4c439a 100644 --- a/docker/test/integration/helper_container/Dockerfile +++ b/docker/test/integration/helper_container/Dockerfile @@ -2,7 +2,7 @@ # Helper docker container to run iptables without sudo FROM alpine -RUN apk add --no-cache -U iproute2 \ +RUN apk add --no-cache -U iproute2=6.3.0-r0 \ && for bin in iptables iptables-restore iptables-save; \ do ln -sf xtables-nft-multi "/sbin/$bin"; \ done diff --git a/docker/test/integration/runner/Dockerfile b/docker/test/integration/runner/Dockerfile index d42fcb9baf67..27b424e4e03b 100644 --- a/docker/test/integration/runner/Dockerfile +++ b/docker/test/integration/runner/Dockerfile @@ -8,35 +8,35 @@ RUN sed -i 
"s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list RUN apt-get update \ && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \ - adduser \ - ca-certificates \ - bash \ - btrfs-progs \ - e2fsprogs \ - iptables \ - xfsprogs \ - tar \ - pigz \ - wget \ - git \ - iproute2 \ - cgroupfs-mount \ - python3-pip \ - tzdata \ - libicu-dev \ - bsdutils \ - curl \ - python3-pika \ - liblua5.1-dev \ - luajit \ - libssl-dev \ - libcurl4-openssl-dev \ - gdb \ - default-jdk \ - software-properties-common \ - libkrb5-dev \ - krb5-user \ - g++ \ + adduser=3.118ubuntu5 \ + ca-certificates=20230311ubuntu0.22.04.1 \ + bash=5.1-6ubuntu1 \ + btrfs-progs=5.16.2-1 \ + e2fsprogs=1.46.5-2ubuntu1.1 \ + iptables=1.8.7-1ubuntu5.1 \ + xfsprogs=5.13.0-1ubuntu2 \ + tar=1.34+dfsg-1ubuntu0.1.22.04.1 \ + pigz=2.6-1 \ + wget=1.21.2-2ubuntu1 \ + git=1:2.34.1-1ubuntu1.10 \ + iproute2=5.15.0-1ubuntu2 \ + cgroupfs-mount=1.4 \ + python3-pip=22.0.2+dfsg-1ubuntu0.4 \ + tzdata=2023c-0ubuntu0.22.04.2 \ + libicu-dev=70.1-2 \ + bsdutils=1:2.37.2-4ubuntu3 \ + curl=7.81.0-1ubuntu1.14 \ + python3-pika=1.2.0-1 \ + liblua5.1-dev=5.1.5-8.1build4 \ + luajit=2.1.0~beta3+dfsg-6 \ + libssl-dev=3.0.2-0ubuntu1.12 \ + libcurl4-openssl-dev=7.81.0-1ubuntu1.14 \ + gdb=12.1-0ubuntu1~22.04 \ + default-jdk=2:1.11-72build2 \ + software-properties-common=0.99.22.8 \ + libkrb5-dev=1.19.2-2ubuntu0.3 \ + krb5-user=1.19.2-2ubuntu0.3 \ + g++=4:11.2.0-1ubuntu1 \ && rm -rf \ /var/lib/apt/lists/* \ /var/cache/debconf \ @@ -63,46 +63,46 @@ RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add - \ RUN python3 -m pip install --no-cache-dir \ - PyMySQL \ + PyMySQL==1.1.0 \ aerospike==11.1.0 \ - asyncio \ + asyncio==3.4.3\ avro==1.10.2 \ - azure-storage-blob \ - cassandra-driver \ + azure-storage-blob==12.19.0\ + cassandra-driver==3.28.0\ confluent-kafka==1.9.2 \ delta-spark==2.3.0 \ - dict2xml \ - dicttoxml \ - docker \ + dict2xml==1.7.3 \ + dicttoxml==1.7.16 \ + docker==6.1.3 \ docker-compose==1.29.2 \ 
- grpcio \ - grpcio-tools \ - kafka-python \ - kazoo \ - lz4 \ + grpcio==1.59.3 \ + grpcio-tools==1.59.3 \ + kafka-python==2.0.2 \ + kazoo==2.9.0 \ + lz4==4.3.2 \ meilisearch==0.18.3 \ - minio \ - nats-py \ - protobuf \ + minio==7.2.0 \ + nats-py==2.6.0 \ + protobuf==4.25.1 \ psycopg2-binary==2.9.6 \ - pyhdfs \ + pyhdfs==0.3.1 \ pymongo==3.11.0 \ pyspark==3.3.2 \ - pytest \ + pytest==7.4.3 \ pytest-order==1.0.0 \ - pytest-random \ - pytest-repeat \ - pytest-timeout \ - pytest-xdist \ - pytz \ + pytest-random==0.2 \ + pytest-repeat==0.9.3 \ + pytest-timeout==2.2.0 \ + pytest-xdist==3.5.0 \ + pytz==2023.3.post1 \ pyyaml==5.3.1 \ - redis \ + redis==5.0.1 \ requests-kerberos \ tzlocal==2.1 \ - retry \ - bs4 \ - lxml \ - urllib3 + retry==0.9.2 \ + bs4==0.0.1 \ + lxml==4.9.3 \ + urllib3==2.1.0 # bs4, lxml are for cloud tests, do not delete # Hudi supports only spark 3.3.*, not 3.4 From 383b4e2d87ef62b2b7fae554bc7450bc9d41dca1 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Tue, 12 Dec 2023 19:17:03 +0100 Subject: [PATCH 043/111] More pinned versions --- docker/test/base/Dockerfile | 14 +++--- docker/test/stateless/Dockerfile | 76 ++++++++++++++++---------------- docker/test/util/Dockerfile | 46 +++++++++---------- 3 files changed, 68 insertions(+), 68 deletions(-) diff --git a/docker/test/base/Dockerfile b/docker/test/base/Dockerfile index 212710787d7e..305d3f107c9c 100644 --- a/docker/test/base/Dockerfile +++ b/docker/test/base/Dockerfile @@ -5,13 +5,13 @@ FROM altinityinfra/test-util:$FROM_TAG RUN apt-get update \ && apt-get install \ - lcov \ - netbase \ - perl \ - pv \ - ripgrep \ - zstd \ - locales \ + lcov=1.15-1 \ + netbase=6.3 \ + perl=5.34.0-3ubuntu1.3 \ + pv=1.6.6-1build2 \ + ripgrep=13.0.0-2ubuntu0.1 \ + zstd=1.4.8+dfsg-3build1 \ + locales=2.35-0ubuntu3.4 \ --yes --no-install-recommends # Sanitizer options for services (clickhouse-server) diff --git a/docker/test/stateless/Dockerfile b/docker/test/stateless/Dockerfile index 2ad0e2d06119..07923201b5f8 100644 
--- a/docker/test/stateless/Dockerfile +++ b/docker/test/stateless/Dockerfile @@ -9,45 +9,45 @@ ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/down RUN apt-get update -y \ && env DEBIAN_FRONTEND=noninteractive \ apt-get install --yes --no-install-recommends \ - awscli \ - brotli \ - lz4 \ - expect \ - golang \ - lsof \ + awscli=1.22.34-1 \ + brotli=1.0.9-2build6 \ + lz4=1.9.3-2build2 \ + expect=5.45.4-2build1 \ + golang=2:1.18~0ubuntu2 \ + lsof=4.93.2+dfsg-1.1build2 \ mysql-client=8.0* \ - ncdu \ - netcat-openbsd \ - nodejs \ - npm \ - odbcinst \ - openjdk-11-jre-headless \ - openssl \ - postgresql-client \ - protobuf-compiler \ - python3 \ - python3-lxml \ - python3-pip \ - python3-requests \ - python3-termcolor \ - qemu-user-static \ - sqlite3 \ - sudo \ - tree \ - unixodbc \ - wget \ - rustc \ - cargo \ - zstd \ - file \ - pv \ - zip \ - p7zip-full \ - rpm2cpio \ - cpio \ + ncdu=1.15.1-1 \ + netcat-openbsd=1.218-4ubuntu1 \ + nodejs=12.22.9~dfsg-1ubuntu3.2 \ + npm=8.5.1~ds-1 \ + odbcinst=2.3.9-5 \ + openjdk-11-jre-headless=11.0.21+9-0ubuntu1~22.04 \ + openssl=3.0.2-0ubuntu1.12 \ + postgresql-client=14+238 \ + protobuf-compiler=3.12.4-1ubuntu7.22.04.1 \ + python3=3.10.6-1~22.04 \ + python3-lxml=4.8.0-1build1 \ + python3-pip=22.0.2+dfsg-1ubuntu0.4 \ + python3-requests=2.25.1+dfsg-2ubuntu0.1 \ + python3-termcolor=1.1.0-3 \ + qemu-user-static=1:6.2+dfsg-2ubuntu6.15 \ + sqlite3=3.37.2-2ubuntu0.1 \ + sudo=1.9.9-1ubuntu2.4 \ + tree=2.0.2-1 \ + unixodbc=2.3.9-5 \ + wget=1.21.2-2ubuntu1 \ + rustc=1.70.0+dfsg0ubuntu1~bpo2-0ubuntu0.22.04.2 \ + cargo=1.70.0+dfsg0ubuntu1~bpo2-0ubuntu0.22.04.2 \ + zstd=1.4.8+dfsg-3build1 \ + file=1:5.41-3ubuntu0.1 \ + pv=1.6.6-1build2 \ + zip=3.0-12build2 \ + p7zip-full=16.02+dfsg-8 \ + rpm2cpio=4.17.0+dfsg1-4build1 \ + cpio=2.13+dfsg-7 \ && apt-get clean -RUN pip3 install numpy scipy pandas Jinja2 +RUN pip3 install numpy==1.26.2 scipy==1.11.4 pandas==2.1.3 Jinja2==3.1.2 RUN mkdir -p /tmp/clickhouse-odbc-tmp \ && 
wget -nv -O - ${odbc_driver_url} | tar --strip-components=1 -xz -C /tmp/clickhouse-odbc-tmp \ @@ -83,8 +83,8 @@ ENV MINIO_ROOT_USER="clickhouse" ENV MINIO_ROOT_PASSWORD="clickhouse" ENV EXPORT_S3_STORAGE_POLICIES=1 -RUN npm install -g azurite \ - && npm install -g tslib +RUN npm install -g azurite@3.28.0 \ + && npm install -g tslib@2.6.2 COPY run.sh / COPY setup_minio.sh / diff --git a/docker/test/util/Dockerfile b/docker/test/util/Dockerfile index 359041eed032..0209ff427828 100644 --- a/docker/test/util/Dockerfile +++ b/docker/test/util/Dockerfile @@ -10,15 +10,15 @@ ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=16 RUN apt-get update \ && apt-get install \ - apt-transport-https \ - apt-utils \ - ca-certificates \ - curl \ - dnsutils \ - gnupg \ - iputils-ping \ - lsb-release \ - wget \ + apt-transport-https=2.4.11 \ + apt-utils=2.4.11 \ + ca-certificates=20230311ubuntu0.22.04.1 \ + curl=7.81.0-1ubuntu1.14 \ + dnsutils=1:9.18.18-0ubuntu0.22.04.1 \ + gnupg=2.2.27-3ubuntu2.1 \ + iputils-ping=3:20211215-1 \ + lsb-release=11.1.0ubuntu4 \ + wget=1.21.2-2ubuntu1 \ --yes --no-install-recommends --verbose-versions \ && export LLVM_PUBKEY_HASH="bda960a8da687a275a2078d43c111d66b1c6a893a3275271beedf266c1ff4a0cdecb429c7a5cccf9f486ea7aa43fd27f" \ && wget -nv -O /tmp/llvm-snapshot.gpg.key https://apt.llvm.org/llvm-snapshot.gpg.key \ @@ -38,27 +38,27 @@ RUN curl -s https://apt.kitware.com/keys/kitware-archive-latest.asc | \ # initial packages RUN apt-get update \ && apt-get install \ - bash \ - bsdmainutils \ - build-essential \ + bash=5.1-6ubuntu1 \ + bsdmainutils=12.1.7+nmu3ubuntu2 \ + build-essential=12.9ubuntu3 \ clang-${LLVM_VERSION} \ clang-tidy-${LLVM_VERSION} \ - cmake \ - gdb \ - git \ - gperf \ + cmake=3.27.7-0kitware1ubuntu22.04.1 \ + gdb=12.1-0ubuntu1~22.04 \ + git=1:2.34.1-1ubuntu1.10 \ + gperf=3.1-1build1 \ libclang-rt-${LLVM_VERSION}-dev \ lld-${LLVM_VERSION} \ llvm-${LLVM_VERSION} \ llvm-${LLVM_VERSION}-dev \ libclang-${LLVM_VERSION}-dev \ - moreutils \ - nasm \ 
- ninja-build \ - pigz \ - rename \ - software-properties-common \ - tzdata \ + moreutils=0.66-1 \ + nasm=2.15.05-1 \ + ninja-build=1.10.1-1 \ + pigz=2.6-1 \ + rename=1.30-1 \ + software-properties-common=0.99.22.8 \ + tzdata=2023c-0ubuntu0.22.04.2 \ --yes --no-install-recommends \ && apt-get clean From 24e69571021472b42b54c2162a61a6d7133aee68 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 13 Dec 2023 11:28:40 +0100 Subject: [PATCH 044/111] Updated pinned versions so images can be actually built --- docker/test/integration/base/Dockerfile | 4 ++-- docker/test/integration/helper_container/Dockerfile | 2 +- docker/test/util/Dockerfile | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docker/test/integration/base/Dockerfile b/docker/test/integration/base/Dockerfile index 82dfc51162f0..1a1c2d85bc53 100644 --- a/docker/test/integration/base/Dockerfile +++ b/docker/test/integration/base/Dockerfile @@ -8,7 +8,7 @@ SHELL ["/bin/bash", "-c"] RUN apt-get update \ && env DEBIAN_FRONTEND=noninteractive apt-get -y install \ bsdutils=1:2.37.2-4ubuntu3 \ - curl=7.81.0-1ubuntu1.14 \ + curl='7.81.*' \ default-jre=2:1.11-72build2 \ g++=4:11.2.0-1ubuntu1 \ gdb=12.1-0ubuntu1~22.04 \ @@ -28,7 +28,7 @@ RUN apt-get update \ tzdata=2023c-0ubuntu0.22.04.2 \ unixodbc=2.3.9-5 \ python3-pip=22.0.2+dfsg-1ubuntu0.4 \ - libcurl4-openssl-dev=7.81.0-1ubuntu1.14 \ + libcurl4-openssl-dev='7.81.*' \ libssl-dev=3.0.2-0ubuntu1.12 \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/* diff --git a/docker/test/integration/helper_container/Dockerfile b/docker/test/integration/helper_container/Dockerfile index eb2d6e4c439a..518076088757 100644 --- a/docker/test/integration/helper_container/Dockerfile +++ b/docker/test/integration/helper_container/Dockerfile @@ -1,7 +1,7 @@ # docker build -t clickhouse/integration-helper . 
# Helper docker container to run iptables without sudo -FROM alpine +FROM alpine:3.16 RUN apk add --no-cache -U iproute2=6.3.0-r0 \ && for bin in iptables iptables-restore iptables-save; \ do ln -sf xtables-nft-multi "/sbin/$bin"; \ diff --git a/docker/test/util/Dockerfile b/docker/test/util/Dockerfile index 0209ff427828..4a4b6874cfc8 100644 --- a/docker/test/util/Dockerfile +++ b/docker/test/util/Dockerfile @@ -13,7 +13,7 @@ RUN apt-get update \ apt-transport-https=2.4.11 \ apt-utils=2.4.11 \ ca-certificates=20230311ubuntu0.22.04.1 \ - curl=7.81.0-1ubuntu1.14 \ + curl='7.81.*' \ dnsutils=1:9.18.18-0ubuntu0.22.04.1 \ gnupg=2.2.27-3ubuntu2.1 \ iputils-ping=3:20211215-1 \ @@ -43,7 +43,7 @@ RUN apt-get update \ build-essential=12.9ubuntu3 \ clang-${LLVM_VERSION} \ clang-tidy-${LLVM_VERSION} \ - cmake=3.27.7-0kitware1ubuntu22.04.1 \ + cmake='3.27.*' \ gdb=12.1-0ubuntu1~22.04 \ git=1:2.34.1-1ubuntu1.10 \ gperf=3.1-1build1 \ From 8fec23f195a6137a82000170374bec7d1e6ce650 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 13 Dec 2023 15:16:02 +0100 Subject: [PATCH 045/111] Fixes to pinned versions Fixed building altinityinfra/integration-test typo Relaxed version requirements on some dependencies to allow images to build. Some of the versions used by upstream are no longer available, that is most likely because of the slight changes in base images (like ubuntu:22.04). 
--- docker/test/base/Dockerfile | 2 +- docker/test/integration/base/Dockerfile | 2 +- docker/test/integration/runner/Dockerfile | 6 +++--- docker/test/util/Dockerfile | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docker/test/base/Dockerfile b/docker/test/base/Dockerfile index 305d3f107c9c..53a74df029e8 100644 --- a/docker/test/base/Dockerfile +++ b/docker/test/base/Dockerfile @@ -11,7 +11,7 @@ RUN apt-get update \ pv=1.6.6-1build2 \ ripgrep=13.0.0-2ubuntu0.1 \ zstd=1.4.8+dfsg-3build1 \ - locales=2.35-0ubuntu3.4 \ + locales='2.35*' \ --yes --no-install-recommends # Sanitizer options for services (clickhouse-server) diff --git a/docker/test/integration/base/Dockerfile b/docker/test/integration/base/Dockerfile index 1a1c2d85bc53..d1618ccc1338 100644 --- a/docker/test/integration/base/Dockerfile +++ b/docker/test/integration/base/Dockerfile @@ -33,7 +33,7 @@ RUN apt-get update \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/* -RUN pip3 install pycurl=7.45.2 +RUN pip3 install pycurl==7.45.2 # Architecture of the image when BuildKit/buildx is used ARG TARGETARCH diff --git a/docker/test/integration/runner/Dockerfile b/docker/test/integration/runner/Dockerfile index 27b424e4e03b..620f60ada61b 100644 --- a/docker/test/integration/runner/Dockerfile +++ b/docker/test/integration/runner/Dockerfile @@ -25,12 +25,12 @@ RUN apt-get update \ tzdata=2023c-0ubuntu0.22.04.2 \ libicu-dev=70.1-2 \ bsdutils=1:2.37.2-4ubuntu3 \ - curl=7.81.0-1ubuntu1.14 \ + curl='7.81.*' \ python3-pika=1.2.0-1 \ - liblua5.1-dev=5.1.5-8.1build4 \ + liblua5.1-dev \ luajit=2.1.0~beta3+dfsg-6 \ libssl-dev=3.0.2-0ubuntu1.12 \ - libcurl4-openssl-dev=7.81.0-1ubuntu1.14 \ + libcurl4-openssl-dev='7.81.*' \ gdb=12.1-0ubuntu1~22.04 \ default-jdk=2:1.11-72build2 \ software-properties-common=0.99.22.8 \ diff --git a/docker/test/util/Dockerfile b/docker/test/util/Dockerfile index 4a4b6874cfc8..c30ce221c188 100644 --- a/docker/test/util/Dockerfile +++ 
b/docker/test/util/Dockerfile @@ -43,7 +43,7 @@ RUN apt-get update \ build-essential=12.9ubuntu3 \ clang-${LLVM_VERSION} \ clang-tidy-${LLVM_VERSION} \ - cmake='3.27.*' \ + cmake='3.*' \ gdb=12.1-0ubuntu1~22.04 \ git=1:2.34.1-1ubuntu1.10 \ gperf=3.1-1build1 \ From 126ef63b2079badf8b65d3d5e05406f427b931c6 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 13 Dec 2023 18:42:50 +0100 Subject: [PATCH 046/111] Unpinned version of iproute2 for altinityinfra/integration-helper --- docker/test/integration/helper_container/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/test/integration/helper_container/Dockerfile b/docker/test/integration/helper_container/Dockerfile index 518076088757..45f790bfb1ef 100644 --- a/docker/test/integration/helper_container/Dockerfile +++ b/docker/test/integration/helper_container/Dockerfile @@ -2,7 +2,7 @@ # Helper docker container to run iptables without sudo FROM alpine:3.16 -RUN apk add --no-cache -U iproute2=6.3.0-r0 \ +RUN apk add --no-cache -U iproute2 \ && for bin in iptables iptables-restore iptables-save; \ do ln -sf xtables-nft-multi "/sbin/$bin"; \ done From e27082aec25955d2c2fff40e4c3f9fab0cefab2f Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 13 Dec 2023 18:49:35 +0100 Subject: [PATCH 047/111] set alpine:3.18 as base for altinityinfra/integration-helper --- docker/test/integration/helper_container/Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/test/integration/helper_container/Dockerfile b/docker/test/integration/helper_container/Dockerfile index 45f790bfb1ef..aaff4e872297 100644 --- a/docker/test/integration/helper_container/Dockerfile +++ b/docker/test/integration/helper_container/Dockerfile @@ -1,8 +1,8 @@ # docker build -t clickhouse/integration-helper . 
# Helper docker container to run iptables without sudo -FROM alpine:3.16 -RUN apk add --no-cache -U iproute2 \ +FROM alpine:3.18 +RUN apk add --no-cache -U iproute2~=6.3 \ && for bin in iptables iptables-restore iptables-save; \ do ln -sf xtables-nft-multi "/sbin/$bin"; \ done From 2eec9e01373db87d0cfc377e7251b37620d2e530 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 14 Dec 2023 15:37:32 +0100 Subject: [PATCH 048/111] Caching pre-pulled docker images for later re-use by integration tests --- tests/ci/integration_test_check.py | 6 ++++++ tests/integration/ci-runner.py | 10 +++++++++- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/tests/ci/integration_test_check.py b/tests/ci/integration_test_check.py index 04259d46afde..ce3df6cbd9dd 100644 --- a/tests/ci/integration_test_check.py +++ b/tests/ci/integration_test_check.py @@ -60,6 +60,7 @@ def get_json_params_dict( docker_images: List[DockerImage], run_by_hash_total: int, run_by_hash_num: int, + dockerd_volume_dir: str ) -> dict: return { "context_name": check_name, @@ -72,6 +73,7 @@ def get_json_params_dict( "disable_net_host": True, "run_by_hash_total": run_by_hash_total, "run_by_hash_num": run_by_hash_num, + "dockerd_volume_dir": dockerd_volume_dir, } @@ -227,6 +229,9 @@ def main(): build_path = temp_path / "build" build_path.mkdir(parents=True, exist_ok=True) + dockerd_volume_dir = temp_path / "dockerd_volume_dir" + dockerd_volume_dir.mkdir(parents=True, exist_ok=True) + if validate_bugfix_check: download_last_release(build_path) else: @@ -245,6 +250,7 @@ def main(): images, run_by_hash_total, run_by_hash_num, + dockerd_volume_dir, ) ) json_params.write(params_text) diff --git a/tests/integration/ci-runner.py b/tests/integration/ci-runner.py index f9aeb520c630..af397644346e 100755 --- a/tests/integration/ci-runner.py +++ b/tests/integration/ci-runner.py @@ -255,6 +255,7 @@ def __init__(self, result_path, params): ) # if use_tmpfs is not set we assume it to be true, otherwise check 
self.use_tmpfs = "use_tmpfs" not in self.params or self.params["use_tmpfs"] + self.dockerd_volume_dir = self.params.get("dockerd_volume_dir", None) self.disable_net_host = ( "disable_net_host" in self.params and self.params["disable_net_host"] ) @@ -419,8 +420,11 @@ def _compress_logs(self, dir, relpaths, result_path): def _get_runner_opts(self): result = [] - if self.use_tmpfs: + if self.dockerd_volume_dir: + result.append(f"--dockerd-volume-dir={self.dockerd_volume_dir}") + elif self.use_tmpfs: result.append("--tmpfs") + if self.disable_net_host: result.append("--disable-net-host") if self.use_analyzer: @@ -877,6 +881,10 @@ def run_impl(self, repo_path, build_path): logging.info("Pulling images") runner._pre_pull_images(repo_path) + if self.dockerd_volume_dir: + logging.info("Cached pre-pulled docker images into %s:\n%s", + self.dockerd_volume_dir, + subprocess.check_output(f"ls -Rlah {shlex.quote(self.dockerd_volume_dir)}", shell=True)) logging.info( "Dump iptables before run %s", From 1806956df53b17d86a9d11f4c25e86ffa3e93b92 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 14 Dec 2023 15:41:41 +0100 Subject: [PATCH 049/111] Fixed type of get_json_params_dict parameter --- tests/ci/integration_test_check.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/ci/integration_test_check.py b/tests/ci/integration_test_check.py index ce3df6cbd9dd..2b77ddaa8ff4 100644 --- a/tests/ci/integration_test_check.py +++ b/tests/ci/integration_test_check.py @@ -60,7 +60,7 @@ def get_json_params_dict( docker_images: List[DockerImage], run_by_hash_total: int, run_by_hash_num: int, - dockerd_volume_dir: str + dockerd_volume_dir: Path ) -> dict: return { "context_name": check_name, @@ -73,7 +73,7 @@ def get_json_params_dict( "disable_net_host": True, "run_by_hash_total": run_by_hash_total, "run_by_hash_num": run_by_hash_num, - "dockerd_volume_dir": dockerd_volume_dir, + "dockerd_volume_dir": dockerd_volume_dir.as_posix(), } From 
72c29d0ba14878d499007bd502ae11ad4a6e668e Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 14 Dec 2023 19:18:20 +0100 Subject: [PATCH 050/111] Not using ppa:ubuntu-toolchain-r/test since it is not ubuntu 20 anymore --- docker/packager/binary/Dockerfile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docker/packager/binary/Dockerfile b/docker/packager/binary/Dockerfile index 28271ff3b054..b703fa82665f 100644 --- a/docker/packager/binary/Dockerfile +++ b/docker/packager/binary/Dockerfile @@ -59,8 +59,7 @@ RUN curl https://sh.rustup.rs -sSf | bash -s -- -y && \ # NOTE: Seems like gcc-11 is too new for ubuntu20 repository # A cross-linker for RISC-V 64 (we need it, because LLVM's LLD does not work): -RUN add-apt-repository ppa:ubuntu-toolchain-r/test --yes \ - && apt-get update \ +RUN apt-get update \ && apt-get install --yes \ binutils-riscv64-linux-gnu \ build-essential \ From 4bdf5797e16ce8210f56c8d1d4408b8e69b3b2f4 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 14 Dec 2023 23:55:24 +0100 Subject: [PATCH 051/111] Removed lengthy debug output --- tests/integration/ci-runner.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/ci-runner.py b/tests/integration/ci-runner.py index af397644346e..a1459bbad3d0 100755 --- a/tests/integration/ci-runner.py +++ b/tests/integration/ci-runner.py @@ -884,7 +884,7 @@ def run_impl(self, repo_path, build_path): if self.dockerd_volume_dir: logging.info("Cached pre-pulled docker images into %s:\n%s", self.dockerd_volume_dir, - subprocess.check_output(f"ls -Rlah {shlex.quote(self.dockerd_volume_dir)}", shell=True)) + subprocess.check_output(f"du -hs {shlex.quote(self.dockerd_volume_dir)}", shell=True)) logging.info( "Dump iptables before run %s", From d77fac5b97f8328c72739fc9edd02e487271409b Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 6 Dec 2023 00:22:15 +0100 Subject: [PATCH 052/111] Attempt to fix failing tests by reducing batch size and reducing 
parallelism Also start regressions after integration tests, maybe that will help with docker pull limit --- .github/workflows/release_branches.yml | 4 ++-- tests/integration/ci-runner.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index e90c935eb93b..333f7965d77d 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -312,7 +312,7 @@ jobs: with: test_name: Integration tests (release) runner_type: stress-tester, func-tester - batches: 4 + batches: 6 run_command: | cd "$REPO_COPY/tests/ci" python3 integration_test_check.py "$CHECK_NAME" @@ -322,7 +322,7 @@ jobs: ############################################################################################# RegressionStart: ## Not depending on the tests above since they can fail at any given moment. - needs: [BuilderDebRelease, BuilderDebAarch64] + needs: [BuilderDebRelease, BuilderDebAarch64, IntegrationTestsRelease] runs-on: ubuntu-latest steps: - run: true diff --git a/tests/integration/ci-runner.py b/tests/integration/ci-runner.py index a1459bbad3d0..3c274463da98 100755 --- a/tests/integration/ci-runner.py +++ b/tests/integration/ci-runner.py @@ -15,8 +15,8 @@ import zlib # for crc32 -MAX_RETRY = 1 -NUM_WORKERS = 5 +MAX_RETRY = 3 +NUM_WORKERS = 3 SLEEP_BETWEEN_RETRIES = 5 PARALLEL_GROUP_SIZE = 100 CLICKHOUSE_BINARY_PATH = "usr/bin/clickhouse" From 0fdc69971c1eff20a35a06e4c1c5be405f188628 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Fri, 15 Dec 2023 09:53:58 +0100 Subject: [PATCH 053/111] Reduce parallelism on each re-run to improve chancess of test passing --- tests/integration/ci-runner.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/integration/ci-runner.py b/tests/integration/ci-runner.py index 3c274463da98..02ff73ef4b66 100755 --- a/tests/integration/ci-runner.py +++ b/tests/integration/ci-runner.py @@ -16,7 +16,7 @@ 
MAX_RETRY = 3 -NUM_WORKERS = 3 +NUM_WORKERS = 5 SLEEP_BETWEEN_RETRIES = 5 PARALLEL_GROUP_SIZE = 100 CLICKHOUSE_BINARY_PATH = "usr/bin/clickhouse" @@ -677,6 +677,11 @@ def run_test_group( parallel_cmd = ( " --parallel {} ".format(num_workers) if num_workers > 0 else "" ) + # For each re-run reduce number of workers, + # to improve chances of tests passing. + if num_workers and num_workers > 0: + num_workers = max(1, num_workers // 2) + # -r -- show extra test summary: # -f -- (f)ailed # -E -- (E)rror From 4223ff32c1a4ef3a05cc84b24b6db88266f987e6 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Fri, 22 Mar 2024 16:10:24 +0000 Subject: [PATCH 054/111] Updated dependencies version requirements to be less strict. --- docker/test/util/Dockerfile | 40 ++++++++++++++++++------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/docker/test/util/Dockerfile b/docker/test/util/Dockerfile index c30ce221c188..9ae5a1abc20d 100644 --- a/docker/test/util/Dockerfile +++ b/docker/test/util/Dockerfile @@ -10,15 +10,15 @@ ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=16 RUN apt-get update \ && apt-get install \ - apt-transport-https=2.4.11 \ - apt-utils=2.4.11 \ - ca-certificates=20230311ubuntu0.22.04.1 \ + apt-transport-https='2.4.*' \ + apt-utils='2.4.*' \ + ca-certificates='20230311ubuntu0.22.04.*' \ curl='7.81.*' \ - dnsutils=1:9.18.18-0ubuntu0.22.04.1 \ - gnupg=2.2.27-3ubuntu2.1 \ + dnsutils='1:9.18.*' \ + gnupg='2.2.*' \ iputils-ping=3:20211215-1 \ - lsb-release=11.1.0ubuntu4 \ - wget=1.21.2-2ubuntu1 \ + lsb-release='11.1.*' \ + wget='1.21.*' \ --yes --no-install-recommends --verbose-versions \ && export LLVM_PUBKEY_HASH="bda960a8da687a275a2078d43c111d66b1c6a893a3275271beedf266c1ff4a0cdecb429c7a5cccf9f486ea7aa43fd27f" \ && wget -nv -O /tmp/llvm-snapshot.gpg.key https://apt.llvm.org/llvm-snapshot.gpg.key \ @@ -38,27 +38,27 @@ RUN curl -s https://apt.kitware.com/keys/kitware-archive-latest.asc | \ # initial packages RUN apt-get update \ && apt-get 
install \ - bash=5.1-6ubuntu1 \ - bsdmainutils=12.1.7+nmu3ubuntu2 \ - build-essential=12.9ubuntu3 \ + bash='5.1*' \ + bsdmainutils='12.1.*' \ + build-essential='12.9*' \ clang-${LLVM_VERSION} \ clang-tidy-${LLVM_VERSION} \ cmake='3.*' \ - gdb=12.1-0ubuntu1~22.04 \ - git=1:2.34.1-1ubuntu1.10 \ - gperf=3.1-1build1 \ + gdb='12.1*' \ + git='1:2.34.*' \ + gperf='3.1*' \ libclang-rt-${LLVM_VERSION}-dev \ lld-${LLVM_VERSION} \ llvm-${LLVM_VERSION} \ llvm-${LLVM_VERSION}-dev \ libclang-${LLVM_VERSION}-dev \ - moreutils=0.66-1 \ - nasm=2.15.05-1 \ - ninja-build=1.10.1-1 \ - pigz=2.6-1 \ - rename=1.30-1 \ - software-properties-common=0.99.22.8 \ - tzdata=2023c-0ubuntu0.22.04.2 \ + moreutils='0.66*' \ + nasm='2.15.*' \ + ninja-build='1.10.*' \ + pigz='2.6*' \ + rename='1.30*' \ + software-properties-common='0.99.*' \ + tzdata='2023c-0ubuntu0.22.04.*' \ --yes --no-install-recommends \ && apt-get clean From 842e6d8bf3915a7c4d44c916cfb2f91d80082383 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Fri, 22 Mar 2024 16:43:34 +0000 Subject: [PATCH 055/111] More dependencies with less strick version requirements. 
--- docker/test/base/Dockerfile | 12 +- docker/test/integration/base/Dockerfile | 44 +++--- docker/test/integration/runner/Dockerfile | 128 +++++++++--------- docker/test/performance-comparison/Dockerfile | 2 +- docker/test/stateless/Dockerfile | 72 +++++----- docker/test/style/Dockerfile | 4 +- 6 files changed, 131 insertions(+), 131 deletions(-) diff --git a/docker/test/base/Dockerfile b/docker/test/base/Dockerfile index 53a74df029e8..4ec7b6d1fb93 100644 --- a/docker/test/base/Dockerfile +++ b/docker/test/base/Dockerfile @@ -5,12 +5,12 @@ FROM altinityinfra/test-util:$FROM_TAG RUN apt-get update \ && apt-get install \ - lcov=1.15-1 \ - netbase=6.3 \ - perl=5.34.0-3ubuntu1.3 \ - pv=1.6.6-1build2 \ - ripgrep=13.0.0-2ubuntu0.1 \ - zstd=1.4.8+dfsg-3build1 \ + lcov='1.15*' \ + netbase='6.3*' \ + perl='5.34.*' \ + pv='1.6.*' \ + ripgrep='13.0.*' \ + zstd='1.4.*' \ locales='2.35*' \ --yes --no-install-recommends diff --git a/docker/test/integration/base/Dockerfile b/docker/test/integration/base/Dockerfile index d1618ccc1338..042da1530e27 100644 --- a/docker/test/integration/base/Dockerfile +++ b/docker/test/integration/base/Dockerfile @@ -7,33 +7,33 @@ SHELL ["/bin/bash", "-c"] RUN apt-get update \ && env DEBIAN_FRONTEND=noninteractive apt-get -y install \ - bsdutils=1:2.37.2-4ubuntu3 \ + bsdutils='1:2.37.*' \ curl='7.81.*' \ - default-jre=2:1.11-72build2 \ - g++=4:11.2.0-1ubuntu1 \ - gdb=12.1-0ubuntu1~22.04 \ - iproute2=5.15.0-1ubuntu2 \ - krb5-user=1.19.2-2ubuntu0.3 \ - libicu-dev=70.1-2 \ - libsqlite3-dev=3.37.2-2ubuntu0.1 \ - libsqliteodbc=0.9998-3 \ - lsof=4.93.2+dfsg-1.1build2 \ - lz4=1.9.3-2build2 \ - odbc-postgresql=1:13.02.0000-2 \ - odbcinst=2.3.9-5 \ - python3=3.10.6-1~22.04 \ - rpm2cpio=4.17.0+dfsg1-4build1 \ - sqlite3=3.37.2-2ubuntu0.1 \ - tar=1.34+dfsg-1ubuntu0.1.22.04.1 \ - tzdata=2023c-0ubuntu0.22.04.2 \ - unixodbc=2.3.9-5 \ - python3-pip=22.0.2+dfsg-1ubuntu0.4 \ + default-jre='2:1.11-*' \ + g++='4:11.2.*' \ + gdb='12.1-*' \ + iproute2='5.15.*' \ + 
krb5-user='1.19.*' \ + libicu-dev='70.1-*' \ + libsqlite3-dev='3.37.*' \ + libsqliteodbc='0.999*' \ + lsof='4.93.*' \ + lz4='1.9.*' \ + odbc-postgresql='1:13.02.*' \ + odbcinst='2.3.*' \ + python3='3.10.*' \ + rpm2cpio='4.17.*' \ + sqlite3='3.37.*' \ + tar='1.34*' \ + tzdata='2023c*' \ + unixodbc='2.3.*' \ + python3-pip='22.0.*' \ libcurl4-openssl-dev='7.81.*' \ - libssl-dev=3.0.2-0ubuntu1.12 \ + libssl-dev='3.0.*' \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/* -RUN pip3 install pycurl==7.45.2 +RUN pip3 install pycurl~=7.45.2 # Architecture of the image when BuildKit/buildx is used ARG TARGETARCH diff --git a/docker/test/integration/runner/Dockerfile b/docker/test/integration/runner/Dockerfile index 681877d05d1f..30aa5f303afc 100644 --- a/docker/test/integration/runner/Dockerfile +++ b/docker/test/integration/runner/Dockerfile @@ -8,35 +8,35 @@ RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list RUN apt-get update \ && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \ - adduser=3.118ubuntu5 \ - ca-certificates=20230311ubuntu0.22.04.1 \ - bash=5.1-6ubuntu1 \ - btrfs-progs=5.16.2-1 \ - e2fsprogs=1.46.5-2ubuntu1.1 \ - iptables=1.8.7-1ubuntu5.1 \ - xfsprogs=5.13.0-1ubuntu2 \ - tar=1.34+dfsg-1ubuntu0.1.22.04.1 \ - pigz=2.6-1 \ - wget=1.21.2-2ubuntu1 \ - git=1:2.34.1-1ubuntu1.10 \ - iproute2=5.15.0-1ubuntu2 \ + adduser='3.11*' \ + ca-certificates='2023*' \ + bash='5.1-*' \ + btrfs-progs='5.16.*' \ + e2fsprogs='1.46.*' \ + iptables='1.8.*' \ + xfsprogs='5.13.*' \ + tar='1.34*' \ + pigz='2.6*' \ + wget='1.21.*' \ + git='1:2.34.*' \ + iproute2='5.15.*' \ cgroupfs-mount=1.4 \ - python3-pip=22.0.2+dfsg-1ubuntu0.4 \ - tzdata=2023c-0ubuntu0.22.04.2 \ - libicu-dev=70.1-2 \ - bsdutils=1:2.37.2-4ubuntu3 \ + python3-pip='22.0.*' \ + tzdata='2023c*' \ + libicu-dev='70.1*' \ + bsdutils='1:2.37.*' \ curl='7.81.*' \ - python3-pika=1.2.0-1 \ + python3-pika='1.2.*' \ liblua5.1-dev \ - luajit=2.1.0~beta3+dfsg-6 \ - 
libssl-dev=3.0.2-0ubuntu1.12 \ + luajit='2.1.*' \ + libssl-dev='3.0.*' \ libcurl4-openssl-dev='7.81.*' \ - gdb=12.1-0ubuntu1~22.04 \ - default-jdk=2:1.11-72build2 \ - software-properties-common=0.99.22.8 \ - libkrb5-dev=1.19.2-2ubuntu0.3 \ - krb5-user=1.19.2-2ubuntu0.3 \ - g++=4:11.2.0-1ubuntu1 \ + gdb='12.1-*' \ + default-jdk='2:1.11-*' \ + software-properties-common='0.99.*' \ + libkrb5-dev='1.19.*' \ + krb5-user='1.19.*' \ + g++='4:11.2.*' \ && rm -rf \ /var/lib/apt/lists/* \ /var/cache/debconf \ @@ -65,46 +65,46 @@ RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add - \ # kazoo 2.10.0 is broken # https://s3.amazonaws.com/clickhouse-test-reports/59337/524625a1d2f4cc608a3f1059e3df2c30f353a649/integration_tests__asan__analyzer__[5_6].html RUN python3 -m pip install --no-cache-dir \ - PyMySQL==1.1.0 \ - aerospike==11.1.0 \ - asyncio==3.4.3\ - avro==1.10.2 \ - azure-storage-blob==12.19.0\ - cassandra-driver==3.28.0\ - confluent-kafka==1.9.2 \ - delta-spark==2.3.0 \ - dict2xml==1.7.3 \ - dicttoxml==1.7.16 \ - docker==6.1.3 \ - docker-compose==1.29.2 \ - grpcio==1.59.3 \ - grpcio-tools==1.59.3 \ - kafka-python==2.0.2 \ - kazoo==2.9.0 \ - lz4==4.3.2 \ - meilisearch==0.18.3 \ - minio==7.2.0 \ - nats-py==2.6.0 \ - protobuf==4.25.1 \ - psycopg2-binary==2.9.6 \ - pyhdfs==0.3.1 \ - pymongo==3.11.0 \ - pyspark==3.3.2 \ - pytest==7.4.3 \ - pytest-order==1.0.0 \ - pytest-random==0.2 \ - pytest-repeat==0.9.3 \ - pytest-timeout==2.2.0 \ - pytest-xdist==3.5.0 \ - pytz==2023.3.post1 \ - pyyaml==5.3.1 \ - redis==5.0.1 \ + PyMySQL~=1.1.0 \ + aerospike~=11.1.0 \ + asyncio~=3.4.3\ + avro~=1.10.2 \ + azure-storage-blob~=12.19.0\ + cassandra-driver~=3.28.0\ + confluent-kafka~=1.9.2 \ + delta-spark~=2.3.0 \ + dict2xml~=1.7.3 \ + dicttoxml~=1.7.16 \ + docker~=6.1.3 \ + docker-compose~=1.29.2 \ + grpcio~=1.59.3 \ + grpcio-tools~=1.59.3 \ + kafka-python~=2.0.2 \ + kazoo~=2.9.0 \ + lz4~=4.3.2 \ + meilisearch~=0.18.3 \ + minio~=7.2.0 \ + nats-py~=2.6.0 \ + 
protobuf~=4.25.1 \ + psycopg2-binary~=2.9.6 \ + pyhdfs~=0.3.1 \ + pymongo~=3.11.0 \ + pyspark~=3.3.2 \ + pytest~=7.4.3 \ + pytest-order~=1.0.0 \ + pytest-random~=0.2 \ + pytest-repeat~=0.9.3 \ + pytest-timeout~=2.2.0 \ + pytest-xdist~=3.5.0 \ + pytz~=2023.3.post1 \ + pyyaml~=5.3.1 \ + redis~=5.0.1 \ requests-kerberos \ - tzlocal==2.1 \ - retry==0.9.2 \ - bs4==0.0.1 \ - lxml==4.9.3 \ - urllib3==2.1.0 + tzlocal~=2.1 \ + retry~=0.9.2 \ + bs4~=0.0.1 \ + lxml~=4.9.3 \ + urllib3~=2.1.0 # bs4, lxml are for cloud tests, do not delete # Hudi supports only spark 3.3.*, not 3.4 diff --git a/docker/test/performance-comparison/Dockerfile b/docker/test/performance-comparison/Dockerfile index 9864cfe6649e..edf1bc4e4164 100644 --- a/docker/test/performance-comparison/Dockerfile +++ b/docker/test/performance-comparison/Dockerfile @@ -33,7 +33,7 @@ RUN apt-get update \ cargo \ ripgrep \ zstd \ - && pip3 --no-cache-dir install 'clickhouse-driver==0.2.1' scipy \ + && pip3 --no-cache-dir install 'clickhouse-driver~=0.2.1' scipy \ && apt-get purge --yes python3-dev g++ \ && apt-get autoremove --yes \ && apt-get clean \ diff --git a/docker/test/stateless/Dockerfile b/docker/test/stateless/Dockerfile index 29addb657d93..21888e6845aa 100644 --- a/docker/test/stateless/Dockerfile +++ b/docker/test/stateless/Dockerfile @@ -9,45 +9,45 @@ ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/down RUN apt-get update -y \ && env DEBIAN_FRONTEND=noninteractive \ apt-get install --yes --no-install-recommends \ - awscli=1.22.34-1 \ - brotli=1.0.9-2build6 \ - lz4=1.9.3-2build2 \ - expect=5.45.4-2build1 \ - golang=2:1.18~0ubuntu2 \ - lsof=4.93.2+dfsg-1.1build2 \ - mysql-client=8.0* \ - ncdu=1.15.1-1 \ - netcat-openbsd=1.218-4ubuntu1 \ - nodejs=12.22.9~dfsg-1ubuntu3.2 \ - npm=8.5.1~ds-1 \ - odbcinst=2.3.9-5 \ - openjdk-11-jre-headless=11.0.21+9-0ubuntu1~22.04 \ - openssl=3.0.2-0ubuntu1.12 \ + awscli='1.22.*' \ + brotli='1.0.*' \ + lz4='1.9.*' \ + expect='5.45.*' \ + 
golang='2:1.18~*' \ + lsof='4.93.*' \ + mysql-client='8.0*' \ + ncdu='1.15.*' \ + netcat-openbsd='1.218-*' \ + nodejs='12.22.*' \ + npm='8.5.*' \ + odbcinst='2.3.*' \ + openjdk-11-jre-headless='11.0.*' \ + openssl='3.0.*' \ postgresql-client=14+238 \ - protobuf-compiler=3.12.4-1ubuntu7.22.04.1 \ - python3=3.10.6-1~22.04 \ - python3-lxml=4.8.0-1build1 \ - python3-pip=22.0.2+dfsg-1ubuntu0.4 \ - python3-requests=2.25.1+dfsg-2ubuntu0.1 \ - python3-termcolor=1.1.0-3 \ - qemu-user-static=1:6.2+dfsg-2ubuntu6.15 \ - sqlite3=3.37.2-2ubuntu0.1 \ - sudo=1.9.9-1ubuntu2.4 \ - tree=2.0.2-1 \ - unixodbc=2.3.9-5 \ - wget=1.21.2-2ubuntu1 \ - rustc=1.70.0+dfsg0ubuntu1~bpo2-0ubuntu0.22.04.2 \ - cargo=1.70.0+dfsg0ubuntu1~bpo2-0ubuntu0.22.04.2 \ - zstd=1.4.8+dfsg-3build1 \ - file=1:5.41-3ubuntu0.1 \ - pv=1.6.6-1build2 \ - zip=3.0-12build2 \ - p7zip-full=16.02+dfsg-8 \ - rpm2cpio=4.17.0+dfsg1-4build1 \ - cpio=2.13+dfsg-7 \ + protobuf-compiler='3.12.*' \ + python3='3.10.*' \ + python3-lxml='4.8.*' \ + python3-pip='22.0.*' \ + python3-requests='2.25.*' \ + python3-termcolor='1.1.*' \ + qemu-user-static='1:6.2*' \ + sqlite3='3.37.*' \ + sudo='1.9.*' \ + tree='2.0.*' \ + unixodbc='2.3.*' \ + wget='1.21.*' \ + rustc='1.70.*' \ + cargo='1.70.*' \ + zstd='1.4.*' \ + file='1:5.41-*' \ + pv='1.6.*' \ + zip='3.0-*' \ + p7zip-full='16.02*' \ + rpm2cpio='4.17.*' \ + cpio='2.13*' \ && apt-get clean -RUN pip3 install numpy==1.26.3 scipy==1.12.0 pandas==1.5.3 Jinja2==3.1.3 +RUN pip3 install numpy~=1.26.3 scipy~=1.12.0 pandas~=1.5.3 Jinja2~=3.1.3 RUN mkdir -p /tmp/clickhouse-odbc-tmp \ && wget -nv -O - ${odbc_driver_url} | tar --strip-components=1 -xz -C /tmp/clickhouse-odbc-tmp \ diff --git a/docker/test/style/Dockerfile b/docker/test/style/Dockerfile index a4feae27c675..4de7ce812487 100644 --- a/docker/test/style/Dockerfile +++ b/docker/test/style/Dockerfile @@ -19,9 +19,9 @@ RUN apt-get update && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \ shellcheck \ yamllint \ locales \ - && pip3 
install black==23.1.0 boto3 codespell==2.2.1 mypy==1.3.0 PyGithub unidiff pylint==2.6.2 \ + && pip3 install black~=23.1.0 boto3 codespell~=2.2.1 mypy~=1.3.0 PyGithub unidiff pylint~=2.6.2 \ && apt-get clean \ - && rm -rf /root/.cache/pip + && rm -rf /root/.cache/pip RUN echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && locale-gen en_US.UTF-8 ENV LC_ALL en_US.UTF-8 From d894ee3b3ff8c9201048b3b6bd02485520802c2b Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Mon, 25 Mar 2024 08:45:43 +0000 Subject: [PATCH 056/111] Attemt to fix tzdata installation issue. --- docker/test/integration/base/Dockerfile | 2 +- docker/test/integration/runner/Dockerfile | 2 +- docker/test/util/Dockerfile | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/test/integration/base/Dockerfile b/docker/test/integration/base/Dockerfile index 042da1530e27..11ef26c8116f 100644 --- a/docker/test/integration/base/Dockerfile +++ b/docker/test/integration/base/Dockerfile @@ -25,7 +25,7 @@ RUN apt-get update \ rpm2cpio='4.17.*' \ sqlite3='3.37.*' \ tar='1.34*' \ - tzdata='2023c*' \ + tzdata \ unixodbc='2.3.*' \ python3-pip='22.0.*' \ libcurl4-openssl-dev='7.81.*' \ diff --git a/docker/test/integration/runner/Dockerfile b/docker/test/integration/runner/Dockerfile index 30aa5f303afc..ecb4514edd99 100644 --- a/docker/test/integration/runner/Dockerfile +++ b/docker/test/integration/runner/Dockerfile @@ -22,7 +22,7 @@ RUN apt-get update \ iproute2='5.15.*' \ cgroupfs-mount=1.4 \ python3-pip='22.0.*' \ - tzdata='2023c*' \ + tzdata \ libicu-dev='70.1*' \ bsdutils='1:2.37.*' \ curl='7.81.*' \ diff --git a/docker/test/util/Dockerfile b/docker/test/util/Dockerfile index 9ae5a1abc20d..f2041fe445c3 100644 --- a/docker/test/util/Dockerfile +++ b/docker/test/util/Dockerfile @@ -58,7 +58,7 @@ RUN apt-get update \ pigz='2.6*' \ rename='1.30*' \ software-properties-common='0.99.*' \ - tzdata='2023c-0ubuntu0.22.04.*' \ + tzdata \ --yes --no-install-recommends \ && apt-get clean From 
481d437769319dc80abf18a63286dbb208f731d8 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Mon, 25 Mar 2024 12:57:51 +0000 Subject: [PATCH 057/111] Fixed rust & cargo installation --- docker/test/stateless/Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/test/stateless/Dockerfile b/docker/test/stateless/Dockerfile index 21888e6845aa..2ba50c14ee92 100644 --- a/docker/test/stateless/Dockerfile +++ b/docker/test/stateless/Dockerfile @@ -23,7 +23,7 @@ RUN apt-get update -y \ odbcinst='2.3.*' \ openjdk-11-jre-headless='11.0.*' \ openssl='3.0.*' \ - postgresql-client=14+238 \ + postgresql-client='14+*' \ protobuf-compiler='3.12.*' \ python3='3.10.*' \ python3-lxml='4.8.*' \ @@ -36,8 +36,8 @@ RUN apt-get update -y \ tree='2.0.*' \ unixodbc='2.3.*' \ wget='1.21.*' \ - rustc='1.70.*' \ - cargo='1.70.*' \ + rustc='1.*' \ + cargo='1.*' \ zstd='1.4.*' \ file='1:5.41-*' \ pv='1.6.*' \ From 0af2bd9786836e734263b5e99fbb8b3e4e50d5b0 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Mon, 25 Mar 2024 14:07:33 +0000 Subject: [PATCH 058/111] Another iteration of fixing docker images --- docker/test/sqllogic/Dockerfile | 2 +- docker/test/sqltest/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/test/sqllogic/Dockerfile b/docker/test/sqllogic/Dockerfile index 5cf71e4d3f84..508fd25d6f42 100644 --- a/docker/test/sqllogic/Dockerfile +++ b/docker/test/sqllogic/Dockerfile @@ -1,6 +1,6 @@ # docker build -t clickhouse/sqllogic-test . ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG RUN apt-get update --yes \ && env DEBIAN_FRONTEND=noninteractive \ diff --git a/docker/test/sqltest/Dockerfile b/docker/test/sqltest/Dockerfile index 437677f4fd1f..d09a20e96cbb 100644 --- a/docker/test/sqltest/Dockerfile +++ b/docker/test/sqltest/Dockerfile @@ -1,6 +1,6 @@ # docker build -t clickhouse/sqltest . 
ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG RUN apt-get update --yes \ && env DEBIAN_FRONTEND=noninteractive \ From fc960dc6bad6319911654d7489f9c5c42609d8f9 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Tue, 26 Mar 2024 15:32:15 +0000 Subject: [PATCH 059/111] Attempt to debug dotnet package installation issue --- docker/test/integration/dotnet_client/Dockerfile | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/docker/test/integration/dotnet_client/Dockerfile b/docker/test/integration/dotnet_client/Dockerfile index f8d334151759..3ea07a3a6c08 100644 --- a/docker/test/integration/dotnet_client/Dockerfile +++ b/docker/test/integration/dotnet_client/Dockerfile @@ -6,5 +6,20 @@ FROM mcr.microsoft.com/dotnet/sdk:3.1 WORKDIR /client COPY *.cs *.csproj /client/ +# Troubleshoout api.nuget.org connection timeout +RUN apt update && apt install -y \ + sslscan \ + && sslscan \ + --show-certificate \ + --no-check-certificate \ + --show-client-cas \ + --show-ciphers \ + --show-cipher-ids \ + --show-times \ + api.nuget.org:443 \ + ; openssl s_client -connect api.nuget.org:443 \ + ; curl -vvvvI https://api.nuget.org/v3/index.json \ + || : + ARG VERSION=4.1.0 RUN dotnet add package ClickHouse.Client -v ${VERSION} From 6033432f0017f7452aa63e94f51445d869f8a292 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Tue, 26 Mar 2024 11:37:06 -0700 Subject: [PATCH 060/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 333f7965d77d..cee367b41aed 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -49,6 +49,10 @@ jobs: run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 docker_images_check.py --suffix aarch64 + env: + RUNNER_IP: "$(hostname -I | cut -d ' ' -f 1)" + 
RUNNER_SSH_COMMAND: "ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null root@$(hostname -I | cut -d ' ' -f 1)" + - name: Upload images files to artifacts uses: actions/upload-artifact@v3 with: @@ -73,6 +77,10 @@ jobs: run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 docker_images_check.py --suffix amd64 + env: + RUNNER_IP: "$(hostname -I | cut -d ' ' -f 1)" + RUNNER_SSH_COMMAND: "ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null root@$(hostname -I | cut -d ' ' -f 1)" + - name: Upload images files to artifacts uses: actions/upload-artifact@v3 with: From e48f1a728a46f14b24d893aa09837f6bac16e3d9 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Tue, 26 Mar 2024 11:40:31 -0700 Subject: [PATCH 061/111] Update docker_images_check.py --- tests/ci/docker_images_check.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ci/docker_images_check.py b/tests/ci/docker_images_check.py index abcfd1379b3a..54c563d36c57 100644 --- a/tests/ci/docker_images_check.py +++ b/tests/ci/docker_images_check.py @@ -293,7 +293,7 @@ def process_single_image( logging.info( "Got error will retry %s time and sleep for %s seconds", i, i * 5 ) - time.sleep(i * 5) + time.sleep(i * 30) else: results.append( TestResult( From f480e2695bfff472eaa85e1c3600c958ff28fee2 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Tue, 26 Mar 2024 11:47:45 -0700 Subject: [PATCH 062/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index cee367b41aed..a6d26306b9e5 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -32,7 +32,7 @@ on: # yamllint disable-line rule:truthy jobs: DockerHubPushAarch64: - runs-on: [self-hosted, style-checker, on-demand, type-cax41, in-fsn1, 
image-arm-app-docker-ce] + runs-on: [self-hosted, style-checker, on-demand, type-cax41, in-hel1, image-arm-app-docker-ce] steps: - name: Check out repository code uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 From d59046187437aa0463ed223da9a7eeb3d3af5909 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Tue, 26 Mar 2024 13:18:31 -0700 Subject: [PATCH 063/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index a6d26306b9e5..5bff177d13c5 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -39,12 +39,6 @@ jobs: with: clear-repository: true - - name: Common docker setup - uses: ./.github/actions/docker_setup - with: - DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} - DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} - - name: Images check run: | cd "$GITHUB_WORKSPACE/tests/ci" @@ -67,12 +61,6 @@ jobs: with: clear-repository: true - - name: Common docker setup - uses: ./.github/actions/docker_setup - with: - DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} - DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} - - name: Images check run: | cd "$GITHUB_WORKSPACE/tests/ci" From 23dd03d1f351961f7bc4ac78cd37cf6658a2e69a Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 28 Mar 2024 08:57:48 -0700 Subject: [PATCH 064/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 1341 ++++++++---------------- 1 file changed, 428 insertions(+), 913 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 5bff177d13c5..435f00e0abf9 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -9,9 +9,6 @@ env: AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} - REGRESSION_RESULTS_URL: altinity-build-artifacts/${{github.event.number}}/$GITHUB_SHA - REGRESSION_ARM_COMMIT: 19e8624c5e4ccc65b128d27b19836c0570e53991 - on: # yamllint disable-line rule:truthy pull_request: @@ -32,7 +29,7 @@ on: # yamllint disable-line rule:truthy jobs: DockerHubPushAarch64: - runs-on: [self-hosted, style-checker, on-demand, type-cax41, in-hel1, image-arm-app-docker-ce] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-app-docker-ce] steps: - name: Check out repository code uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 @@ -43,10 +40,6 @@ jobs: run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 docker_images_check.py --suffix aarch64 - env: - RUNNER_IP: "$(hostname -I | cut -d ' ' -f 1)" - RUNNER_SSH_COMMAND: "ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null root@$(hostname -I | cut -d ' ' -f 1)" - - name: Upload images files to artifacts uses: actions/upload-artifact@v3 with: @@ -54,7 +47,7 @@ jobs: path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json DockerHubPushAmd64: - runs-on: [self-hosted, style-checker, on-demand, type-cpx51, image-x86-app-docker-ce] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce] steps: - name: Check out repository code uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 @@ -65,10 +58,6 @@ jobs: run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 docker_images_check.py --suffix amd64 - env: - RUNNER_IP: "$(hostname -I | cut -d ' ' -f 1)" - RUNNER_SSH_COMMAND: "ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null root@$(hostname -I | cut -d ' ' -f 1)" - - name: Upload images files to artifacts uses: actions/upload-artifact@v3 with: @@ -77,7 +66,7 @@ jobs: DockerHubPush: needs: [DockerHubPushAmd64, DockerHubPushAarch64] - runs-on: 
[self-hosted, style-checker, on-demand, type-cpx41, image-x86-app-docker-ce] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx41, altinity-image-x86-app-docker-ce] steps: - name: Check out repository code uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 @@ -115,18 +104,29 @@ jobs: name: changed_images path: ${{ runner.temp }}/changed_images.json - CompatibilityCheck: + CompatibilityCheckX86: needs: [BuilderDebRelease] uses: ./.github/workflows/reusable_test.yml secrets: inherit with: test_name: Compatibility check X86 - runner_type: style-checker, on-demand, type-cpx41, image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx41, altinity-image-x86-app-docker-ce timeout_minutes: 180 run_command: | cd "$REPO_COPY/tests/ci" python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions + CompatibilityCheckAarch64: + needs: [BuilderDebAarch64] + uses: ./.github/workflows/reusable_test.yml + secrets: inherit + with: + test_name: Compatibility check Aarch64 + runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-app-docker-ce + run_command: | + cd "$REPO_COPY/tests/ci" + python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc + ######################################################################################### #################################### ORDINARY BUILDS #################################### ######################################################################################### @@ -138,7 +138,7 @@ jobs: build_name: package_release checkout_depth: 0 timeout_minutes: 180 - runner_type: builder, on-demand, type-ccx53, image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -149,8 +149,70 @@ jobs: with: build_name: package_aarch64 checkout_depth: 0 - runner_type: builder, 
on-demand, type-ccx53, image-x86-app-docker-ce + timeout_minutes: 180 + runner_type: altinity-on-demand, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + additional_envs: | + CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable + + BuilderDebAsan: + needs: [DockerHubPush] + uses: ./.github/workflows/reusable_build.yml + secrets: inherit + with: + build_name: package_asan + checkout_depth: 0 + timeout_minutes: 180 + runner_type: altinity-on-demand, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + additional_envs: | + CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable + BuilderDebUBsan: + needs: [DockerHubPush] + uses: ./.github/workflows/reusable_build.yml + secrets: inherit + with: + build_name: package_ubsan + checkout_depth: 0 + timeout_minutes: 180 + runner_type: altinity-on-demand, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + additional_envs: | + CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable + + BuilderDebTsan: + needs: [DockerHubPush] + uses: ./.github/workflows/reusable_build.yml + secrets: inherit + with: + build_name: package_tsan + checkout_depth: 0 + timeout_minutes: 180 + runner_type: altinity-on-demand, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + additional_envs: | + CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable + + BuilderDebMsan: + needs: [DockerHubPush] + uses: ./.github/workflows/reusable_build.yml + secrets: inherit + with: + build_name: package_msan + checkout_depth: 0 + timeout_minutes: 180 + runner_type: altinity-on-demand, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + additional_envs: | + CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable + + BuilderDebDebug: + needs: [DockerHubPush] + uses: ./.github/workflows/reusable_build.yml + secrets: inherit + with: + build_name: package_debug + checkout_depth: 0 + timeout_minutes: 180 + runner_type: altinity-on-demand, altinity-type-ccx53, altinity-in-ash, 
altinity-image-x86-app-docker-ce + additional_envs: | + CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable ############################################################################################ ##################################### Docker images ####################################### @@ -159,7 +221,7 @@ jobs: needs: - BuilderDebRelease - BuilderDebAarch64 - runs-on: [self-hosted, style-checker, on-demand, type-cpx51, image-x86-app-docker-ce] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce] timeout-minutes: 180 steps: - name: Check out repository code @@ -171,9 +233,10 @@ jobs: - name: Check docker altinityinfra/clickhouse-server building run: | cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_server.py --release-type head --no-push \ + docker buildx create --use + python3 docker_server.py --release-type head \ --image-repo altinityinfra/clickhouse-server --image-path docker/server - python3 docker_server.py --release-type head --no-push \ + python3 docker_server.py --release-type head \ --image-repo altinityinfra/clickhouse-keeper --image-path docker/keeper - name: Cleanup if: always() @@ -194,7 +257,7 @@ jobs: secrets: inherit with: test_name: ClickHouse build check - runner_type: style-checker, on-demand, type-cpx31, image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx41, altinity-image-x86-app-docker-ce timeout_minutes: 180 additional_envs: | NEEDS_DATA<> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=${{ matrix.SUITE }} - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path 
}} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - env: - artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-amd64-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - RegressionCommonAarch64: - strategy: - fail-fast: false - matrix: - SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, key_value, lightweight_delete, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] - needs: [RegressionStart] - runs-on: [self-hosted, regression-tester, on-demand, type-cax41, image-arm-app-docker-ce] - timeout-minutes: 180 - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_ARM_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=${{ matrix.SUITE }} - artifacts=public - EOF 
- - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - env: - artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-aarch64-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - RegressionBenchmarkAmd64: - strategy: - fail-fast: false - matrix: - STORAGE: [minio, aws_s3, gcs] - needs: [RegressionStart] - runs-on: [self-hosted, regression-tester, on-demand, type-cpx51, image-x86-app-docker-ce] - timeout-minutes: 180 - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - 
REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=ontime_benchmark - STORAGE=/${{ matrix.STORAGE }} - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/benchmark.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --storage ${{ matrix.STORAGE }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - env: - artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - - uses: actions/upload-artifact@v3 - if: always() - with: - name: benchmark-${{ matrix.STORAGE }}-amd64-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - RegressionBenchmarkAarch64: - strategy: - fail-fast: false - matrix: - 
STORAGE: [minio, aws_s3, gcs] - needs: [RegressionStart] - runs-on: [self-hosted, regression-tester, on-demand, type-cax41, image-arm-app-docker-ce] - timeout-minutes: 180 - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_ARM_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=ontime_benchmark - STORAGE=/${{ matrix.STORAGE }} - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/benchmark.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --storage ${{ matrix.STORAGE }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" 
job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - env: - artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - - uses: actions/upload-artifact@v3 - if: always() - with: - name: benchmark-${{ matrix.STORAGE }}-aarch64-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - RegressionClickHouseKeeperSSLAmd64: - needs: [RegressionStart] - runs-on: [self-hosted, regression-tester, on-demand, type-cpx51, image-x86-app-docker-ce] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=clickhouse_keeper - STORAGE=/ssl - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --ssl - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" 
commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-ssl-amd64-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - RegressionClickHouseKeeperSSLAarch64: - needs: [RegressionStart] - runs-on: [self-hosted, regression-tester, on-demand, type-cax41, image-arm-app-docker-ce] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=clickhouse_keeper - STORAGE=/ssl - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --ssl - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" 
commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-ssl-aarch64-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - RegressionLDAPAmd64: - strategy: - fail-fast: false - matrix: - SUITE: [authentication, external_user_directory, role_mapping] - needs: [RegressionStart] - runs-on: [self-hosted, regression-tester, on-demand, type-cpx51, image-x86-app-docker-ce] - timeout-minutes: 180 - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=ldap/${{ matrix.SUITE }} - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" 
repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - env: - artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ldap-${{ matrix.SUITE }}-amd64-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - RegressionLDAPAarch64: - strategy: - fail-fast: false - matrix: - SUITE: [authentication, external_user_directory, role_mapping] - needs: [RegressionStart] - runs-on: [self-hosted, regression-tester, on-demand, type-cax41, image-arm-app-docker-ce] - timeout-minutes: 180 - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_ARM_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=ldap/${{ matrix.SUITE }} - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr 
project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - env: - artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ldap-${{ matrix.SUITE }}-aarch64-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - RegressionParquetAmd64: - needs: [RegressionStart] - runs-on: [self-hosted, regression-tester, on-demand, type-cpx51, image-x86-app-docker-ce] - timeout-minutes: 180 - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=parquet - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr 
project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - --log raw.log - --storage minio - --storage aws_s3 - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --storage gcs - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - env: - artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-amd64-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - RegressionS3Amd64: - strategy: - fail-fast: false - matrix: - STORAGE: [minio, aws_s3, gcs] - needs: [RegressionStart] - runs-on: [self-hosted, regression-tester, on-demand, type-cpx51, image-x86-app-docker-ce] - timeout-minutes: 180 - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=s3 - STORAGE=/${{ matrix.STORAGE }} - artifacts=public - 
EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - --log raw.log - --storage ${{ matrix.STORAGE }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - env: - artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-amd64-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - RegressionS3Aarch64: - strategy: - fail-fast: false - matrix: - STORAGE: [minio, aws_s3, gcs] - needs: [RegressionStart] - runs-on: [self-hosted, regression-tester, on-demand, type-cax41, 
image-arm-app-docker-ce] - timeout-minutes: 180 - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_ARM_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=s3 - STORAGE=/${{ matrix.STORAGE }} - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - --log raw.log - --storage ${{ matrix.STORAGE }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - - name: Create and upload logs - if: always() - run: 
.github/create_and_upload_logs.sh 1 - env: - artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-aarch64-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - RegressionTieredStorageS3Amd64: - strategy: - fail-fast: false - matrix: - STORAGE: [minio, s3amazon, s3gcs] - needs: [RegressionStart] - runs-on: [self-hosted, regression-tester, on-demand, type-cpx51, image-x86-app-docker-ce] - timeout-minutes: 180 - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=tiered_storage - STORAGE=/${{ matrix.STORAGE }} - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" 
job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - --log raw.log - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/ - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --with-${{ matrix.STORAGE }} - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - env: - artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-amd64-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - RegressionTieredStorageS3Aarch64: - strategy: - fail-fast: false - matrix: - STORAGE: [minio, s3amazon, s3gcs] - needs: [RegressionStart] - runs-on: [self-hosted, regression-tester, on-demand, type-cax41, image-arm-app-docker-ce] - timeout-minutes: 180 - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_ARM_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=tiered_storage - STORAGE=/${{ matrix.STORAGE }} - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 
.github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - --log raw.log - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/ - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --with-${{ matrix.STORAGE }} - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - env: - artifact_s3_dir: build/v${{ env.version }}/$GITHUB_SHA - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-aarch64-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log + RegressionTestsRelease: + needs: [BuilderReport] + uses: ./.github/workflows/regression.yml + secrets: inherit + with: + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce, altinity-setup-regression + commit: 6da94b78dc53cb8965ab56c04a89ebf54ed04cbc + arch: release + build_sha: ${{ github.event_name == 'pull_request' && 
github.event.pull_request.head.sha || github.event_name == 'release' && github.sha }} + + RegressionTestsAarch64: + needs: [BuilderReport] + uses: ./.github/workflows/regression.yml + secrets: inherit + with: + runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-app-docker-ce, altinity-setup-regression + commit: 6da94b78dc53cb8965ab56c04a89ebf54ed04cbc + arch: aarch64 + build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.event_name == 'release' && github.sha }} SignRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, on-demand, type-cpx41, image-x86-app-docker-ce] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-app-docker-ce] timeout-minutes: 180 steps: - name: Set envs @@ -1232,29 +741,35 @@ jobs: - DockerHubPush - DockerServerImages - BuilderReport - # - BuilderSpecialReport - MarkReleaseReady + - FunctionalStatelessTestDebug - FunctionalStatelessTestRelease - FunctionalStatelessTestAarch64 + - FunctionalStatelessTestAsan + - FunctionalStatelessTestTsan + - FunctionalStatelessTestMsan + - FunctionalStatelessTestUBsan + - FunctionalStatefulTestDebug - FunctionalStatefulTestRelease - FunctionalStatefulTestAarch64 + - FunctionalStatefulTestAsan + - FunctionalStatefulTestTsan + - FunctionalStatefulTestMsan + - FunctionalStatefulTestUBsan + - StressTestDebug + - StressTestAsan + - StressTestTsan + - StressTestMsan + - StressTestUBsan + - IntegrationTestsAsan + - IntegrationTestsTsan - IntegrationTestsRelease - - CompatibilityCheck - - RegressionCommonAmd64 - - RegressionCommonAarch64 - - RegressionBenchmarkAmd64 - - RegressionBenchmarkAarch64 - - RegressionClickHouseKeeperSSLAmd64 - - RegressionClickHouseKeeperSSLAarch64 - - RegressionLDAPAmd64 - - RegressionLDAPAarch64 - - RegressionParquetAmd64 - - RegressionS3Amd64 - - RegressionS3Aarch64 - - RegressionTieredStorageS3Amd64 - - RegressionTieredStorageS3Aarch64 + - CompatibilityCheckX86 + 
- CompatibilityCheckAarch64 + - RegressionTestsRelease + - RegressionTestsAarch64 - SignRelease - runs-on: [self-hosted, style-checker, on-demand, type-cpx31, image-x86-app-docker-ce] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx31, altinity-image-x86-app-docker-ce] steps: - name: Check out repository code uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 From 9a8d06dec3a3dd88ac8e566d79f3f705cb496b5e Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 28 Mar 2024 09:01:52 -0700 Subject: [PATCH 065/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 435f00e0abf9..90737d222549 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -40,6 +40,10 @@ jobs: run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 docker_images_check.py --suffix aarch64 + env: + RUNNER_IP: "$(hostname -I | cut -d ' ' -f 1)" + RUNNER_SSH_COMMAND: "ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null root@$(hostname -I | cut -d ' ' -f 1)" + - name: Upload images files to artifacts uses: actions/upload-artifact@v3 with: @@ -58,6 +62,10 @@ jobs: run: | cd "$GITHUB_WORKSPACE/tests/ci" python3 docker_images_check.py --suffix amd64 + env: + RUNNER_IP: "$(hostname -I | cut -d ' ' -f 1)" + RUNNER_SSH_COMMAND: "ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null root@$(hostname -I | cut -d ' ' -f 1)" + - name: Upload images files to artifacts uses: actions/upload-artifact@v3 with: From 0dac80cdd0fef3d44b3515502c86a7dc7e05a033 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 28 Mar 2024 09:02:43 -0700 Subject: [PATCH 066/111] Create regression.yml --- .github/workflows/regression.yml | 515 +++++++++++++++++++++++++++++++ 1 file changed, 515 insertions(+) create mode 
100644 .github/workflows/regression.yml diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml new file mode 100644 index 000000000000..cc89340617a6 --- /dev/null +++ b/.github/workflows/regression.yml @@ -0,0 +1,515 @@ +name: Regression test workflow +'on': + workflow_call: + inputs: + runner_type: + description: the label of runner to use, can be a simple string or a comma-separated list + required: true + type: string + commit: + description: commit hash of the regression tests. + required: true + type: string + arch: + description: arch to run the tests on. + required: true + type: string + timeout_minutes: + description: Maximum number of minutes to let workflow run before GitHub cancels it. + default: 210 + type: number + build_sha: + description: commit sha of the workflow run for artifact upload. + required: true + type: string + checkout_depth: + description: the value of the git shallow checkout + required: false + type: number + default: 1 + submodules: + description: if the submodules should be checked out + required: false + type: boolean + default: false + additional_envs: + description: additional ENV variables to setup the job + type: string + secrets: + secret_envs: + description: if given, it's passed to the environments + required: false + AWS_SECRET_ACCESS_KEY: + description: the access key to the aws param store. + required: true + AWS_ACCESS_KEY_ID: + description: the access key id to the aws param store. + required: true + AWS_DEFAULT_REGION: + description: the region of the aws param store. + required: true + AWS_REPORT_KEY_ID: + description: aws s3 key id used for regression test reports. + required: true + AWS_REPORT_SECRET_ACCESS_KEY: + description: aws s3 secret access key used for regression test reports. + required: true + AWS_REPORT_REGION: + description: aws s3 region used for regression test reports. + required: true + DOCKER_USERNAME: + description: username of the docker user. 
+ required: true + DOCKER_PASSWORD: + description: password to the docker user. + required: true + REGRESSION_AWS_S3_BUCKET: + description: aws s3 bucket used for regression tests. + required: true + REGRESSION_AWS_S3_KEY_ID: + description: aws s3 key id used for regression tests. + required: true + REGRESSION_AWS_S3_SECRET_ACCESS_KEY: + description: aws s3 secret access key used for regression tests. + required: true + REGRESSION_AWS_S3_REGION: + description: aws s3 region used for regression tests. + required: true + REGRESSION_GCS_KEY_ID: + description: gcs key id used for regression tests. + required: true + REGRESSION_GCS_KEY_SECRET: + description: gcs key secret used for regression tests. + required: true + REGRESSION_GCS_URI: + description: gcs uri used for regression tests. + required: true + +env: + # Force the stdout and stderr streams to be unbuffered + PYTHONUNBUFFERED: 1 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} + args: --test-to-end + --no-colors + --local + --collect-service-logs + --output classic + --parallel 1 + --log raw.log + artifacts: builds + artifact_paths: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + build_sha: ${{ inputs.build_sha }} + pr_number: ${{ github.event.number }} + event_name: ${{ github.event_name }} + +jobs: + runner_labels_setup: + name: Compute proper runner labels for the rest of the jobs + runs-on: ubuntu-latest + outputs: + runner_labels: ${{ steps.setVariables.outputs.runner_labels }} + steps: + - id: setVariables + name: Prepare runner_labels variables for the later steps + run: | + + # Prepend self-hosted + input="self-hosted, ${input}" + + # Remove all whitespace + 
input="$(echo ${input} | tr -d [:space:])" + # Make something like a JSON array from comma-separated list + input="[ '${input//\,/\'\, \'}' ]" + + echo "runner_labels=$input" >> ${GITHUB_OUTPUT} + env: + input: ${{ inputs.runner_type }} + + Common: + strategy: + fail-fast: false + matrix: + SUITE: [aes_encryption, aggregate_functions, alter, atomic_insert, base_58, clickhouse_keeper, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=${{ matrix.SUITE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - 
name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} + + Benchmark: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3, gcs] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=ontime_benchmark + STORAGE=/${{ matrix.STORAGE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/benchmark.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --storage ${{ matrix.STORAGE }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" 
job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: benchmark-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + ClickHouseKeeperSSL: + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=clickhouse_keeper + STORAGE=/ssl + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --ssl + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-${{ inputs.arch }}-ssl-artifacts + path: ${{ 
env.artifact_paths }} + + LDAP: + strategy: + fail-fast: false + matrix: + SUITE: [authentication, external_user_directory, role_mapping] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=ldap/${{ matrix.SUITE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ldap-${{ matrix.SUITE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + Parquet: + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: 
Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=parquet + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + ParquetS3: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=parquet + STORAGE=${{ matrix.STORAGE}} + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ 
inputs.arch }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --storage ${{ matrix.STORAGE }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-${{ env.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + S3: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3, gcs] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=s3 + STORAGE=/${{ matrix.STORAGE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Setup + 
run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --storage ${{ matrix.STORAGE }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} + + TieredStorage: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, s3amazon, s3gcs] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=tiered_storage + STORAGE=/${{ matrix.STORAGE }} + EOF + - name: Download json reports + uses: 
actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/ + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --with-${{ matrix.STORAGE }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} From c0660008bb999dce089b46b183a5e52217111e9f Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 28 Mar 2024 09:53:12 -0700 Subject: [PATCH 067/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 90737d222549..432d5fc7880d 100644 --- 
a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -576,6 +576,7 @@ jobs: ASTFuzzerTestAsan: needs: [BuilderDebAsan] uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: AST fuzzer (asan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -586,6 +587,7 @@ jobs: ASTFuzzerTestTsan: needs: [BuilderDebTsan] uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: AST fuzzer (tsan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -596,6 +598,7 @@ jobs: ASTFuzzerTestUBSan: needs: [BuilderDebUBsan] uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: AST fuzzer (ubsan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -606,6 +609,7 @@ jobs: ASTFuzzerTestMSan: needs: [BuilderDebMsan] uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: AST fuzzer (msan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -616,6 +620,7 @@ jobs: ASTFuzzerTestDebug: needs: [BuilderDebDebug] uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: AST fuzzer (debug) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce From 5afcac8c14a4d0a8f512d158b22315a37a7bf23b Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 28 Mar 2024 11:38:31 -0700 Subject: [PATCH 068/111] Update ci-runner.py --- tests/integration/ci-runner.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/integration/ci-runner.py b/tests/integration/ci-runner.py index 02ff73ef4b66..905f3e942a71 100755 --- a/tests/integration/ci-runner.py +++ b/tests/integration/ci-runner.py @@ -16,7 +16,7 @@ MAX_RETRY = 3 -NUM_WORKERS = 5 
+NUM_WORKERS = 10 SLEEP_BETWEEN_RETRIES = 5 PARALLEL_GROUP_SIZE = 100 CLICKHOUSE_BINARY_PATH = "usr/bin/clickhouse" @@ -675,7 +675,7 @@ def run_test_group( test_cmd = " ".join([shlex.quote(test) for test in sorted(test_names)]) parallel_cmd = ( - " --parallel {} ".format(num_workers) if num_workers > 0 else "" + " --parallel {} ".format(num_workers) if (num_workers > 0 or i > 0) else "" ) # For each re-run reduce number of workers, # to improve chances of tests passing. @@ -1080,4 +1080,3 @@ def write_results(results_file, status_file, results, status): out_results_file = os.path.join(str(runner.path()), "test_results.tsv") out_status_file = os.path.join(str(runner.path()), "check_status.tsv") write_results(out_results_file, out_status_file, test_results, status) - logging.info("Result written") From 2d1fe7079bfc64b059fead6aae6b34f38d0484fa Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 28 Mar 2024 11:40:14 -0700 Subject: [PATCH 069/111] Update ci-runner.py --- tests/integration/ci-runner.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integration/ci-runner.py b/tests/integration/ci-runner.py index 905f3e942a71..06746fc919d4 100755 --- a/tests/integration/ci-runner.py +++ b/tests/integration/ci-runner.py @@ -1080,3 +1080,4 @@ def write_results(results_file, status_file, results, status): out_results_file = os.path.join(str(runner.path()), "test_results.tsv") out_status_file = os.path.join(str(runner.path()), "check_status.tsv") write_results(out_results_file, out_status_file, test_results, status) + logging.info("Result written") From 9247a623426d397b9d367f1e52c67f488c708bee Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Fri, 29 Mar 2024 10:13:54 -0700 Subject: [PATCH 070/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_branches.yml 
b/.github/workflows/release_branches.yml index 5bff177d13c5..7fb822882b2f 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -307,7 +307,7 @@ jobs: secrets: inherit with: test_name: Integration tests (release) - runner_type: stress-tester, func-tester + runner_type: self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce batches: 6 run_command: | cd "$REPO_COPY/tests/ci" From 9bef402af8114b280267a849b2a4b1dc22c236fc Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Sat, 30 Mar 2024 15:07:03 -0700 Subject: [PATCH 071/111] Update cluster.py --- tests/integration/helpers/cluster.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/integration/helpers/cluster.py b/tests/integration/helpers/cluster.py index 4282f1895ac2..4bbc42fc2673 100644 --- a/tests/integration/helpers/cluster.py +++ b/tests/integration/helpers/cluster.py @@ -3688,7 +3688,9 @@ def http_query_and_get_answer_with_error( method = "POST" if data else "GET" r = requester.request(method, url, data=data, auth=auth, timeout=timeout) - + # Force encoding to UTF-8 + r.encoding = "UTF-8" + if r.ok: return (r.content if content else r.text, None) From 164fb9d37b196b109cc81b1d0dec491c4b2c3fc0 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Wed, 3 Apr 2024 10:38:19 -0700 Subject: [PATCH 072/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 432d5fc7880d..ffc0ea5f9ded 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -692,7 +692,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce, altinity-setup-regression - commit: 
6da94b78dc53cb8965ab56c04a89ebf54ed04cbc + commit: b029b6a1b48cc99b8621bc32ee0f5bc26e65f002 arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.event_name == 'release' && github.sha }} @@ -702,7 +702,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-app-docker-ce, altinity-setup-regression - commit: 6da94b78dc53cb8965ab56c04a89ebf54ed04cbc + commit: b029b6a1b48cc99b8621bc32ee0f5bc26e65f002 arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.event_name == 'release' && github.sha }} From 3e58c0bbddc9e3c210b1fb8ef5fb9d591bae2437 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Wed, 3 Apr 2024 12:40:17 -0700 Subject: [PATCH 073/111] Update images.json --- docker/images.json | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docker/images.json b/docker/images.json index 5242ab82fc44..17fcda59c017 100644 --- a/docker/images.json +++ b/docker/images.json @@ -71,38 +71,47 @@ "dependent": [] }, "docker/test/integration/s3_proxy": { + "only_amd64": true, "name": "altinityinfra/s3-proxy", "dependent": [] }, "docker/test/integration/resolver": { + "only_amd64": true, "name": "altinityinfra/python-bottle", "dependent": [] }, "docker/test/integration/helper_container": { + "only_amd64": true, "name": "altinityinfra/integration-helper", "dependent": [] }, "docker/test/integration/mysql_golang_client": { + "only_amd64": true, "name": "altinityinfra/mysql-golang-client", "dependent": [] }, "docker/test/integration/dotnet_client": { + "only_amd64": true, "name": "altinityinfra/dotnet-client", "dependent": [] }, "docker/test/integration/mysql_java_client": { + "only_amd64": true, "name": "altinityinfra/mysql-java-client", "dependent": [] }, "docker/test/integration/mysql_js_client": { + "only_amd64": true, "name": "altinityinfra/mysql-js-client", "dependent": 
[] }, "docker/test/integration/mysql_php_client": { + "only_amd64": true, "name": "altinityinfra/mysql-php-client", "dependent": [] }, "docker/test/integration/postgresql_java_client": { + "only_amd64": true, "name": "altinityinfra/postgresql-java-client", "dependent": [] }, @@ -144,6 +153,7 @@ "dependent": [] }, "docker/test/integration/nginx_dav": { + "only_amd64": true, "name": "altinityinfra/nginx-dav", "dependent": [] } From fe847923709bc09583f4a332e3d8cc14da69f955 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Wed, 3 Apr 2024 14:58:29 -0700 Subject: [PATCH 074/111] Update images.json --- docker/images.json | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/docker/images.json b/docker/images.json index 17fcda59c017..5242ab82fc44 100644 --- a/docker/images.json +++ b/docker/images.json @@ -71,47 +71,38 @@ "dependent": [] }, "docker/test/integration/s3_proxy": { - "only_amd64": true, "name": "altinityinfra/s3-proxy", "dependent": [] }, "docker/test/integration/resolver": { - "only_amd64": true, "name": "altinityinfra/python-bottle", "dependent": [] }, "docker/test/integration/helper_container": { - "only_amd64": true, "name": "altinityinfra/integration-helper", "dependent": [] }, "docker/test/integration/mysql_golang_client": { - "only_amd64": true, "name": "altinityinfra/mysql-golang-client", "dependent": [] }, "docker/test/integration/dotnet_client": { - "only_amd64": true, "name": "altinityinfra/dotnet-client", "dependent": [] }, "docker/test/integration/mysql_java_client": { - "only_amd64": true, "name": "altinityinfra/mysql-java-client", "dependent": [] }, "docker/test/integration/mysql_js_client": { - "only_amd64": true, "name": "altinityinfra/mysql-js-client", "dependent": [] }, "docker/test/integration/mysql_php_client": { - "only_amd64": true, "name": "altinityinfra/mysql-php-client", "dependent": [] }, "docker/test/integration/postgresql_java_client": { - "only_amd64": true, "name": 
"altinityinfra/postgresql-java-client", "dependent": [] }, @@ -153,7 +144,6 @@ "dependent": [] }, "docker/test/integration/nginx_dav": { - "only_amd64": true, "name": "altinityinfra/nginx-dav", "dependent": [] } From 2670a444df9025629d98078197cc31f98f3ef469 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Wed, 3 Apr 2024 19:04:10 -0700 Subject: [PATCH 075/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index ffc0ea5f9ded..8b36e6b15b3b 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -65,6 +65,8 @@ jobs: env: RUNNER_IP: "$(hostname -I | cut -d ' ' -f 1)" RUNNER_SSH_COMMAND: "ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null root@$(hostname -I | cut -d ' ' -f 1)" + + - run: sleep 3600 - name: Upload images files to artifacts uses: actions/upload-artifact@v3 From f534e558016dbdc640b5962b0b89729b88df9a7d Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Wed, 3 Apr 2024 20:39:33 -0700 Subject: [PATCH 076/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 8b36e6b15b3b..91f2b9c72993 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -66,8 +66,6 @@ jobs: RUNNER_IP: "$(hostname -I | cut -d ' ' -f 1)" RUNNER_SSH_COMMAND: "ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null root@$(hostname -I | cut -d ' ' -f 1)" - - run: sleep 3600 - - name: Upload images files to artifacts uses: actions/upload-artifact@v3 with: From 956d768cbefec22522467365539b6a1d466a8c0e Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 4 Apr 2024 14:43:39 +0000 Subject: [PATCH 077/111] 
Increased sleep time on test to make it more stable Similar to https://github.com/Altinity/ClickHouse/pull/321 (23.8.5) --- tests/integration/test_drop_is_lock_free/test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/test_drop_is_lock_free/test.py b/tests/integration/test_drop_is_lock_free/test.py index 61d52a1d9b11..dc6912a9df38 100644 --- a/tests/integration/test_drop_is_lock_free/test.py +++ b/tests/integration/test_drop_is_lock_free/test.py @@ -104,7 +104,7 @@ def test_query_is_lock_free(lock_free_query, exclusive_table): select_handler = node.get_query_request( f""" - SELECT sleepEachRow(3) FROM {exclusive_table} SETTINGS function_sleep_max_microseconds_per_block = 0; + SELECT sleepEachRow(5) FROM {exclusive_table} SETTINGS function_sleep_max_microseconds_per_block = 0; """, query_id=query_id, ) @@ -173,7 +173,7 @@ def test_query_is_permanent(transaction, permanent, exclusive_table): select_handler = node.get_query_request( f""" - SELECT sleepEachRow(3) FROM {exclusive_table} SETTINGS function_sleep_max_microseconds_per_block = 0; + SELECT sleepEachRow(5) FROM {exclusive_table} SETTINGS function_sleep_max_microseconds_per_block = 0; """, query_id=query_id, ) From 6357cba09107ae9e39eaaaec881ef0e12130ce33 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 4 Apr 2024 08:24:01 -0700 Subject: [PATCH 078/111] Remove Fuzzers --- .github/workflows/release_branches.yml | 58 -------------------------- 1 file changed, 58 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 91f2b9c72993..ba13064b03a2 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -570,64 +570,6 @@ jobs: cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" -############################################################################################## -##################################### AST FUZZERS 
############################################ -############################################################################################## - ASTFuzzerTestAsan: - needs: [BuilderDebAsan] - uses: ./.github/workflows/reusable_test.yml - secrets: inherit - with: - test_name: AST fuzzer (asan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce - run_command: | - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - ASTFuzzerTestTsan: - needs: [BuilderDebTsan] - uses: ./.github/workflows/reusable_test.yml - secrets: inherit - with: - test_name: AST fuzzer (tsan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce - run_command: | - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - ASTFuzzerTestUBSan: - needs: [BuilderDebUBsan] - uses: ./.github/workflows/reusable_test.yml - secrets: inherit - with: - test_name: AST fuzzer (ubsan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce - run_command: | - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - ASTFuzzerTestMSan: - needs: [BuilderDebMsan] - uses: ./.github/workflows/reusable_test.yml - secrets: inherit - with: - test_name: AST fuzzer (msan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce - run_command: | - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - ASTFuzzerTestDebug: - needs: [BuilderDebDebug] - uses: ./.github/workflows/reusable_test.yml - secrets: inherit - with: - test_name: AST fuzzer (debug) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce - run_command: | - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - ############################################################################################# 
############################# INTEGRATION TESTS ############################################# ############################################################################################# From a6e0c6e363e23f7a0ca9396380fd51ae7847c883 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 4 Apr 2024 10:21:23 -0700 Subject: [PATCH 079/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index ba13064b03a2..a4621a1307ee 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -146,7 +146,7 @@ jobs: build_name: package_release checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -158,7 +158,7 @@ jobs: build_name: package_aarch64 checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -170,7 +170,7 @@ jobs: build_name: package_asan checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -182,7 +182,7 @@ jobs: build_name: package_ubsan checkout_depth: 0 timeout_minutes: 180 - 
runner_type: altinity-on-demand, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -194,7 +194,7 @@ jobs: build_name: package_tsan checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -206,7 +206,7 @@ jobs: build_name: package_msan checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -218,7 +218,7 @@ jobs: build_name: package_debug checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable From 0f433f4361571de6b2d24ef04e4b4cfff66ce283 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 4 Apr 2024 18:43:22 -0700 Subject: [PATCH 080/111] Update sign_release.py --- tests/ci/sign_release.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index 701ccc29b65b..872966a578a5 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -7,6 +7,7 @@ from pr_info import PRInfo from build_download_helper 
import download_builds_filter import hashlib +from pathlib import Path GPG_BINARY_SIGNING_KEY = os.getenv("GPG_BINARY_SIGNING_KEY") GPG_BINARY_SIGNING_PASSPHRASE = os.getenv("GPG_BINARY_SIGNING_PASSPHRASE") @@ -57,9 +58,9 @@ def main(): s3_helper = S3Helper() - s3_path_prefix = f"{pr_info.number}/{pr_info.sha}/" + CHECK_NAME.lower().replace( + s3_path_prefix = Path(f"{pr_info.number}/{pr_info.sha}/" + CHECK_NAME.lower().replace( " ", "_" - ).replace("(", "_").replace(")", "_").replace(",", "_") + ).replace("(", "_").replace(")", "_").replace(",", "_")) # downloads `package_release` artifacts generated download_builds_filter(CHECK_NAME, reports_path, TEMP_PATH) @@ -68,8 +69,8 @@ def main(): full_path = os.path.join(TEMP_PATH, f) hashed_file_path = hash_file(full_path) signed_file_path = sign_file(hashed_file_path) - s3_path = f'{s3_path_prefix}/{os.path.basename(signed_file_path)}' - s3_helper.upload_build_file_to_s3(signed_file_path, s3_path) + s3_path = s3_path_prefix / os.path.basename(signed_file_path) + s3_helper.upload_build_file_to_s3(Path(signed_file_path), str(s3_path)) print(f'Uploaded file {signed_file_path} to {s3_path}') # Signed hashes are: From 5335c35e7c7ac21c68819088a0fa87b0649021ce Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Fri, 5 Apr 2024 08:33:04 -0700 Subject: [PATCH 081/111] Update regression.yml --- .github/workflows/regression.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index cc89340617a6..170e89b27b00 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -16,7 +16,7 @@ name: Regression test workflow type: string timeout_minutes: description: Maximum number of minutes to let workflow run before GitHub cancels it. - default: 210 + default: 240 type: number build_sha: description: commit sha of the workflow run for artifact upload. 
@@ -394,6 +394,7 @@ jobs: run: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --only "/parquet/${{ matrix.STORAGE }}/*" --storage ${{ matrix.STORAGE }} --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} From d0401db5ed418d83dd9ec190d70113d4bbb38d50 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Fri, 5 Apr 2024 08:33:36 -0700 Subject: [PATCH 082/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index a4621a1307ee..5e84e4a12fc3 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -634,7 +634,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce, altinity-setup-regression - commit: b029b6a1b48cc99b8621bc32ee0f5bc26e65f002 + commit: 55cf702a8e77ef361084fb017d2ef072952d6367 arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.event_name == 'release' && github.sha }} @@ -644,7 +644,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-app-docker-ce, altinity-setup-regression - commit: b029b6a1b48cc99b8621bc32ee0f5bc26e65f002 + commit: 55cf702a8e77ef361084fb017d2ef072952d6367 arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.event_name == 'release' && github.sha }} From 709f6de6b6bd4f884d1453f7f2f009053143f5d9 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Fri, 5 Apr 2024 12:43:05 -0700 Subject: [PATCH 083/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 14 +++++++------- 1 file changed, 7 
insertions(+), 7 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 5e84e4a12fc3..9169a23ebc3f 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -146,7 +146,7 @@ jobs: build_name: package_release checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -158,7 +158,7 @@ jobs: build_name: package_aarch64 checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -170,7 +170,7 @@ jobs: build_name: package_asan checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -182,7 +182,7 @@ jobs: build_name: package_ubsan checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -194,7 +194,7 @@ jobs: build_name: package_tsan checkout_depth: 0 timeout_minutes: 180 - 
runner_type: altinity-on-demand, altinity-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -206,7 +206,7 @@ jobs: build_name: package_msan checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -218,7 +218,7 @@ jobs: build_name: package_debug checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable From 1fb484d6a7a0f2716a4a092deba98125c2639265 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Fri, 5 Apr 2024 12:44:02 -0700 Subject: [PATCH 084/111] Update cancel.yml --- .github/workflows/cancel.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cancel.yml b/.github/workflows/cancel.yml index 3c2be767ad22..69d1d5e9f392 100644 --- a/.github/workflows/cancel.yml +++ b/.github/workflows/cancel.yml @@ -11,7 +11,7 @@ on: # yamllint disable-line rule:truthy - requested jobs: cancel: - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cx11, altinity-setup-none] steps: - uses: styfle/cancel-workflow-action@0.9.1 with: From e29eac935b9343a152db043e2dbdfb701c80f4fd Mon Sep 17 00:00:00 2001 From: robot-clickhouse-ci-1 
<118761991+robot-clickhouse-ci-1@users.noreply.github.com> Date: Sat, 14 Oct 2023 17:50:04 +0200 Subject: [PATCH 085/111] Merge pull request #55627 from azat/tests/fix-test_system_merges Fix flakiness of test_system_merges (by increasing sleep interval properly) --- .../test_system_merges/configs/user_overrides.xml | 7 +++++++ tests/integration/test_system_merges/test.py | 6 ++++-- 2 files changed, 11 insertions(+), 2 deletions(-) create mode 100644 tests/integration/test_system_merges/configs/user_overrides.xml diff --git a/tests/integration/test_system_merges/configs/user_overrides.xml b/tests/integration/test_system_merges/configs/user_overrides.xml new file mode 100644 index 000000000000..ca0a435aee71 --- /dev/null +++ b/tests/integration/test_system_merges/configs/user_overrides.xml @@ -0,0 +1,7 @@ + + + + 10G + + + diff --git a/tests/integration/test_system_merges/test.py b/tests/integration/test_system_merges/test.py index 5f0fc7b4d84f..d0fa7a1d4262 100644 --- a/tests/integration/test_system_merges/test.py +++ b/tests/integration/test_system_merges/test.py @@ -10,6 +10,7 @@ node1 = cluster.add_instance( "node1", main_configs=["configs/logs_config.xml"], + user_configs=["configs/user_overrides.xml"], with_zookeeper=True, macros={"shard": 0, "replica": 1}, ) @@ -17,6 +18,7 @@ node2 = cluster.add_instance( "node2", main_configs=["configs/logs_config.xml"], + user_configs=["configs/user_overrides.xml"], with_zookeeper=True, macros={"shard": 0, "replica": 2}, ) @@ -183,10 +185,10 @@ def test_mutation_simple(started_cluster, replicated): starting_block, starting_block, starting_block + 1 ) - # ALTER will sleep for 3s * 3 (rows) = 9s + # ALTER will sleep for 9s def alter(): node1.query( - f"ALTER TABLE {name} UPDATE a = 42 WHERE sleep(9) = 0", + f"ALTER TABLE {name} UPDATE a = 42 WHERE sleep(9) OR 1", settings=settings, ) From c9f49bc625a4aae5b07c3cb0d572ea1290f5e7d6 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Mon, 8 Apr 
2024 11:59:02 -0700 Subject: [PATCH 086/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 9169a23ebc3f..9005a0dac14e 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -170,7 +170,7 @@ jobs: build_name: package_asan checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx63, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -182,7 +182,7 @@ jobs: build_name: package_ubsan checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx63, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -194,7 +194,7 @@ jobs: build_name: package_tsan checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx63, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -206,7 +206,7 @@ jobs: build_name: package_msan checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx63, altinity-in-ash, 
altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable From a4c901e0a566bf22701db76c3026086f62f71530 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Mon, 8 Apr 2024 20:04:54 -0700 Subject: [PATCH 087/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 9005a0dac14e..a3cee1f63783 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -522,6 +522,7 @@ jobs: with: test_name: Stress test (asan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-snapshot-docker_ipv6_x86 + timeout_minutes: 180 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" @@ -533,6 +534,7 @@ jobs: with: test_name: Stress test (tsan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-snapshot-docker_ipv6_x86 + timeout_minutes: 180 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" @@ -555,6 +557,7 @@ jobs: with: test_name: Stress test (ubsan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-snapshot-docker_ipv6_x86 + timeout_minutes: 180 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" @@ -566,6 +569,7 @@ jobs: with: test_name: Stress test (debug) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-snapshot-docker_ipv6_x86 + timeout_minutes: 180 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" From c14eaa260c3fed23a8406b01dc23c22c97d54c4e Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Tue, 9 Apr 2024 15:48:23 +0200 Subject: [PATCH 088/111] Attempt to debug the test longer timeout + some debug output --- 
tests/integration/test_system_merges/test.py | 22 ++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/tests/integration/test_system_merges/test.py b/tests/integration/test_system_merges/test.py index d0fa7a1d4262..98ec9e6adea5 100644 --- a/tests/integration/test_system_merges/test.py +++ b/tests/integration/test_system_merges/test.py @@ -5,6 +5,8 @@ from helpers.cluster import ClickHouseCluster from helpers.test_tools import assert_eq_with_retry +import logging + cluster = ClickHouseCluster(__file__) node1 = cluster.add_instance( @@ -188,9 +190,21 @@ def test_mutation_simple(started_cluster, replicated): # ALTER will sleep for 9s def alter(): node1.query( - f"ALTER TABLE {name} UPDATE a = 42 WHERE sleep(9) OR 1", + f"ALTER TABLE {name} UPDATE a = 42 WHERE sleep(12) OR 1", settings=settings, ) + def debug_merges(): + logging.debug("going to print what is going on in system.merges") + for i in range(10): + logging.debug("Merges:") + logging.debug(node1.query( + f"select * from system.merges", + settings=settings, + )) + time.sleep(1) + + t_debug_merges = threading.Thread(target=debug_merges) + t_debug_merges.start() t = threading.Thread(target=alter) t.start() @@ -203,7 +217,11 @@ def alter(): retry_count=30, sleep_time=0.1, ) - + node1.query( + f"SYSTEM START MERGES {name}", + settings=settings, + ) + time.sleep(3) # give merges chance to start assert ( split_tsv( node_check.query( From 04f5e9df9ca6b4ca2e0f7a8a0b86449914eb51da Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Tue, 9 Apr 2024 09:40:53 -0700 Subject: [PATCH 089/111] Update build_check.py --- tests/ci/build_check.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py index e0bd37181a34..2ea3ae75a0ee 100644 --- a/tests/ci/build_check.py +++ b/tests/ci/build_check.py @@ -491,7 +491,7 @@ def main(): log_url, f"Build ({build_name})", ) - 
ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) # Fail the build job if it didn't succeed if build_status != SUCCESS: From 31519f5775b18b0a2430688f7b59695cadb7fece Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Tue, 9 Apr 2024 10:06:33 -0700 Subject: [PATCH 090/111] Update build caching --- tests/ci/build_check.py | 34 +++------------------------------- 1 file changed, 3 insertions(+), 31 deletions(-) diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py index 2ea3ae75a0ee..c4039763a2ca 100644 --- a/tests/ci/build_check.py +++ b/tests/ci/build_check.py @@ -12,12 +12,13 @@ from ccache_utils import CargoCache from docker_pull_helper import get_image_with_version from env_helper import ( - CACHES_PATH, GITHUB_JOB_API_URL, IMAGES_PATH, REPO_COPY, + S3_ACCESS_KEY_ID, S3_BUILDS_BUCKET, S3_DOWNLOAD, + S3_SECRET_ACCESS_KEY, TEMP_PATH, CLICKHOUSE_STABLE_VERSION_SUFFIX, ) @@ -38,8 +39,6 @@ get_instance_type, ) from stopwatch import Stopwatch -from ccache_utils import get_ccache_if_not_exists, upload_ccache - IMAGE_NAME = "altinityinfra/binary-builder" BUILD_LOG_NAME = "build_log.log" @@ -62,7 +61,6 @@ def get_packager_cmd( cargo_cache_dir: Path, build_version: str, image_version: str, - ccache_path: str, official: bool, ) -> str: package_type = build_config.package_type @@ -80,9 +78,7 @@ def get_packager_cmd( if build_config.tidy: cmd += " --clang-tidy" - # NOTE(vnemkov): we are going to continue to use ccache for now - cmd += " --cache=ccache" - cmd += f" --ccache-dir={ccache_path}" + cmd += " --cache=sccache" cmd += " --s3-rw-access" cmd += f" --s3-bucket={S3_BUILDS_BUCKET}" cmd += f" --cargo-cache-dir={cargo_cache_dir}" @@ -278,24 +274,6 @@ def main(): ) cargo_cache.download() - # NOTE(vnemkov): since we still want to use CCACHE over SCCACHE, unlike upstream, - # we need to create local directory for that, 
just as with 22.8 - ccache_path = Path(CACHES_PATH, build_name + "_ccache") - - logging.info("Will try to fetch cache for our build") - try: - get_ccache_if_not_exists( - ccache_path, s3_helper, pr_info.number, temp_path, pr_info.release_pr - ) - except Exception as e: - # In case there are issues with ccache, remove the path and do not fail a build - logging.info("Failed to get ccache, building without it. Error: %s", e) - rmtree(ccache_path, ignore_errors=True) - - if not ccache_path.exists(): - logging.info("cache was not fetched, will create empty dir") - ccache_path.mkdir(parents=True) - packager_cmd = get_packager_cmd( build_config, repo_path / "docker" / "packager", @@ -303,7 +281,6 @@ def main(): cargo_cache.directory, version.string, image_version, - ccache_path, official_flag, ) @@ -320,7 +297,6 @@ def main(): subprocess.check_call( f"sudo chown -R ubuntu:ubuntu {build_output_path}", shell=True ) - subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {ccache_path}", shell=True) logging.info("Build finished as %s, log path %s", build_status, log_path) if build_status == SUCCESS: cargo_cache.upload() @@ -334,10 +310,6 @@ def main(): ) sys.exit(1) - # Upload the ccache first to have the least build time in case of problems - logging.info("Will upload cache") - upload_ccache(ccache_path, s3_helper, pr_info.number, temp_path) - # FIXME performance performance_urls = [] performance_path = build_output_path / "performance.tar.zst" From 729d9448048dda867f7cc504e4fbe917c69d625d Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Tue, 9 Apr 2024 10:59:43 -0700 Subject: [PATCH 091/111] Update cancel.yml --- .github/workflows/cancel.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cancel.yml b/.github/workflows/cancel.yml index 69d1d5e9f392..46ff5794b5ba 100644 --- a/.github/workflows/cancel.yml +++ b/.github/workflows/cancel.yml @@ -11,7 +11,7 @@ on: # yamllint disable-line rule:truthy - 
requested jobs: cancel: - runs-on: [self-hosted, altinity-on-demand, altinity-type-cx11, altinity-setup-none] + runs-on: ubuntu-latest steps: - uses: styfle/cancel-workflow-action@0.9.1 with: From f4183233e52ea1e702d4970ae96cf90dc0db4f63 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Tue, 9 Apr 2024 11:00:20 -0700 Subject: [PATCH 092/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index a3cee1f63783..e607d9cd9e14 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -170,7 +170,7 @@ jobs: build_name: package_asan checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx63, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -182,7 +182,7 @@ jobs: build_name: package_ubsan checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx63, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -194,7 +194,7 @@ jobs: build_name: package_tsan checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx63, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable @@ -206,7 
+206,7 @@ jobs: build_name: package_msan checkout_depth: 0 timeout_minutes: 180 - runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx63, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-setup-builder, altinity-type-ccx53, altinity-in-ash, altinity-image-x86-app-docker-ce additional_envs: | CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable From a57f5e8138af99e6824324290e12ebaa367109e6 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Tue, 9 Apr 2024 12:13:16 -0700 Subject: [PATCH 093/111] Update env_helper.py --- tests/ci/env_helper.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/ci/env_helper.py b/tests/ci/env_helper.py index 08952ed3179d..38592706c1c6 100644 --- a/tests/ci/env_helper.py +++ b/tests/ci/env_helper.py @@ -26,7 +26,9 @@ REPORTS_PATH = os.getenv("REPORTS_PATH", p.abspath(p.join(module_dir, "./reports"))) REPO_COPY = os.getenv("REPO_COPY", GITHUB_WORKSPACE) RUNNER_TEMP = os.getenv("RUNNER_TEMP", p.abspath(p.join(module_dir, "./tmp"))) +S3_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID") S3_BUILDS_BUCKET = os.getenv("S3_BUILDS_BUCKET", "altinity-build-artifacts") +S3_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY") S3_TEST_REPORTS_BUCKET = os.getenv("S3_TEST_REPORTS_BUCKET", "altinity-build-artifacts") S3_URL = os.getenv("S3_URL", "https://s3.amazonaws.com") CLICKHOUSE_STABLE_VERSION_SUFFIX = os.getenv("CLICKHOUSE_STABLE_VERSION_SUFFIX", "stable") From 6a2fe92826fb898381691c4598a54e392f7ce9f8 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Tue, 9 Apr 2024 13:01:21 -0700 Subject: [PATCH 094/111] Update build_check.py --- tests/ci/build_check.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py index c4039763a2ca..4a1517a63ede 100644 --- a/tests/ci/build_check.py +++ b/tests/ci/build_check.py @@ -82,6 +82,8 @@ def get_packager_cmd( cmd += " 
--s3-rw-access" cmd += f" --s3-bucket={S3_BUILDS_BUCKET}" cmd += f" --cargo-cache-dir={cargo_cache_dir}" + cmd += f" --s3-access-key-id={S3_ACCESS_KEY_ID}" + cmd += f" --s3-secret-access-key={S3_SECRET_ACCESS_KEY}" if build_config.additional_pkgs: cmd += " --additional-pkgs" From e9744ecba7074a4464fed4afbc7871fec505a920 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Tue, 9 Apr 2024 13:03:24 -0700 Subject: [PATCH 095/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index e607d9cd9e14..36473201c37c 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -638,7 +638,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce, altinity-setup-regression - commit: 55cf702a8e77ef361084fb017d2ef072952d6367 + commit: ed5f2f0d9f33f46eb8962c2f2d974701c86f187e arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.event_name == 'release' && github.sha }} @@ -648,7 +648,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-app-docker-ce, altinity-setup-regression - commit: 55cf702a8e77ef361084fb017d2ef072952d6367 + commit: ed5f2f0d9f33f46eb8962c2f2d974701c86f187e arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.event_name == 'release' && github.sha }} From f96a3e9d79f9f6261f8c581097327c1f18370cb6 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 10 Apr 2024 00:54:42 +0000 Subject: [PATCH 096/111] More debugging --- tests/integration/test_system_merges/test.py | 23 ++++++++++---------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git 
a/tests/integration/test_system_merges/test.py b/tests/integration/test_system_merges/test.py index 98ec9e6adea5..e81137c7e64e 100644 --- a/tests/integration/test_system_merges/test.py +++ b/tests/integration/test_system_merges/test.py @@ -190,18 +190,18 @@ def test_mutation_simple(started_cluster, replicated): # ALTER will sleep for 9s def alter(): node1.query( - f"ALTER TABLE {name} UPDATE a = 42 WHERE sleep(12) OR 1", + f"ALTER TABLE {name} UPDATE a = 42 WHERE sleep(16) OR 1", settings=settings, ) + def debug_merges(): logging.debug("going to print what is going on in system.merges") - for i in range(10): - logging.debug("Merges:") - logging.debug(node1.query( - f"select * from system.merges", + for i in range(1000): + logging.debug("Merges:", node1.query( + f"select now(), * from system.merges FORMAT Pretty", settings=settings, )) - time.sleep(1) + time.sleep(0.1) t_debug_merges = threading.Thread(target=debug_merges) t_debug_merges.start() @@ -209,6 +209,11 @@ def debug_merges(): t = threading.Thread(target=alter) t.start() + node1.query( + f"SYSTEM START MERGES {name}", + settings=settings, + ) + # Wait for the mutation to actually start assert_eq_with_retry( node_check, @@ -217,11 +222,7 @@ def debug_merges(): retry_count=30, sleep_time=0.1, ) - node1.query( - f"SYSTEM START MERGES {name}", - settings=settings, - ) - time.sleep(3) # give merges chance to start + assert ( split_tsv( node_check.query( From 66ab925f73867aa78276465b0a88a9f3e4d60d7b Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 10 Apr 2024 02:53:30 +0000 Subject: [PATCH 097/111] Fixed the test --- tests/integration/test_system_merges/test.py | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/tests/integration/test_system_merges/test.py b/tests/integration/test_system_merges/test.py index e81137c7e64e..6187a5f94e04 100644 --- a/tests/integration/test_system_merges/test.py +++ b/tests/integration/test_system_merges/test.py @@ -5,8 +5,6 @@ from 
helpers.cluster import ClickHouseCluster from helpers.test_tools import assert_eq_with_retry -import logging - cluster = ClickHouseCluster(__file__) node1 = cluster.add_instance( @@ -190,22 +188,10 @@ def test_mutation_simple(started_cluster, replicated): # ALTER will sleep for 9s def alter(): node1.query( - f"ALTER TABLE {name} UPDATE a = 42 WHERE sleep(16) OR 1", + f"ALTER TABLE {name} UPDATE a = 42 WHERE ignore(sleep(9)) == 0", settings=settings, ) - def debug_merges(): - logging.debug("going to print what is going on in system.merges") - for i in range(1000): - logging.debug("Merges:", node1.query( - f"select now(), * from system.merges FORMAT Pretty", - settings=settings, - )) - time.sleep(0.1) - - t_debug_merges = threading.Thread(target=debug_merges) - t_debug_merges.start() - t = threading.Thread(target=alter) t.start() From 3487bb9c20cd50e6d4e1c98a0ccbbae3ea3e163b Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Tue, 9 Apr 2024 22:45:33 -0700 Subject: [PATCH 098/111] Update packager --- docker/packager/packager | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/docker/packager/packager b/docker/packager/packager index c8025b64fc54..72bb8b565c24 100755 --- a/docker/packager/packager +++ b/docker/packager/packager @@ -125,9 +125,11 @@ def parse_env_variables( sanitizer: str, package_type: str, cache: str, + s3_access_key_id: str, s3_bucket: str, s3_directory: str, s3_rw_access: bool, + s3_secret_access_key: str, clang_tidy: bool, version: str, official: bool, @@ -294,6 +296,10 @@ def parse_env_variables( result.append(f"SCCACHE_S3_KEY_PREFIX={sccache_dir}") if not s3_rw_access: result.append("SCCACHE_S3_NO_CREDENTIALS=true") + if s3_access_key_id: + result.append(f"AWS_ACCESS_KEY_ID={s3_access_key_id}") + if s3_secret_access_key: + result.append(f"AWS_SECRET_ACCESS_KEY={s3_secret_access_key}") if clang_tidy: # `CTCACHE_DIR` has the same purpose as the `CCACHE_DIR` above. 
@@ -413,6 +419,14 @@ def parse_args() -> argparse.Namespace: type=dir_name, help="a directory with ccache", ) + parser.add_argument( + "--s3-access-key-id", + help="an S3 access key id used for sscache bucket", + ) + parser.add_argument( + "--s3-secret-access-key", + help="an S3 secret access key used for sscache bucket", + ) parser.add_argument( "--s3-bucket", help="an S3 bucket used for sscache and clang-tidy-cache", @@ -494,10 +508,11 @@ def main() -> None: args.compiler, args.sanitizer, args.package_type, - args.cache, + args.s3_access_key_id, args.s3_bucket, args.s3_directory, args.s3_rw_access, + args.s3_secret_access_key, args.clang_tidy, args.version, args.official, From ed0a2b15e7f1c4082ff21aa585f650b14c02e9e5 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Wed, 10 Apr 2024 08:20:00 -0700 Subject: [PATCH 099/111] Update packager --- docker/packager/packager | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/packager/packager b/docker/packager/packager index 72bb8b565c24..333e8080967e 100755 --- a/docker/packager/packager +++ b/docker/packager/packager @@ -508,6 +508,7 @@ def main() -> None: args.compiler, args.sanitizer, args.package_type, + args.cache, args.s3_access_key_id, args.s3_bucket, args.s3_directory, From cd249bb0c92445b07fb406526883359463129fa8 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Wed, 10 Apr 2024 11:29:16 -0700 Subject: [PATCH 100/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 36473201c37c..c383d5bc6035 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -638,7 +638,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce, altinity-setup-regression - commit: 
ed5f2f0d9f33f46eb8962c2f2d974701c86f187e + commit: 8ca52fb119806bb3ee0d4f6b65fe4b7ad5fb58a1 arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.event_name == 'release' && github.sha }} @@ -648,7 +648,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-app-docker-ce, altinity-setup-regression - commit: ed5f2f0d9f33f46eb8962c2f2d974701c86f187e + commit: 8ca52fb119806bb3ee0d4f6b65fe4b7ad5fb58a1 arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.event_name == 'release' && github.sha }} From 028048010bb8b3c22530e3b1b385c0c60afb93c7 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 11 Apr 2024 00:26:10 -0700 Subject: [PATCH 101/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index c383d5bc6035..ec2b8a5b5c44 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -638,7 +638,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce, altinity-setup-regression - commit: 8ca52fb119806bb3ee0d4f6b65fe4b7ad5fb58a1 + commit: f5ab9d5ef344ae1f4c8d5b8f698aceee84947cda arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.event_name == 'release' && github.sha }} @@ -648,7 +648,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-app-docker-ce, altinity-setup-regression - commit: 8ca52fb119806bb3ee0d4f6b65fe4b7ad5fb58a1 + commit: f5ab9d5ef344ae1f4c8d5b8f698aceee84947cda arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || 
github.event_name == 'release' && github.sha }} From 3e4a5820011b849cdd35f4505881c536a37ad52c Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 11 Apr 2024 07:57:58 -0700 Subject: [PATCH 102/111] Update regression.yml --- .github/workflows/regression.yml | 45 ++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index 170e89b27b00..e788f35ce38c 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -181,6 +181,51 @@ jobs: name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts path: ${{ env.artifact_paths}} + Alter: + strategy: + fail-fast: false + matrix: + SUITE: ["replace partition", "attach partition", "move partition"] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=${{ matrix.SUITE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --only "/alter/${{ env.SUITE }}/*" + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" 
commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} + Benchmark: strategy: fail-fast: false From df25d10a7e2388d0768d751961e0eaec882256cd Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 11 Apr 2024 10:59:07 -0700 Subject: [PATCH 103/111] Update release_branches.yml --- .github/workflows/release_branches.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index ec2b8a5b5c44..185493167554 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -638,7 +638,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce, altinity-setup-regression - commit: f5ab9d5ef344ae1f4c8d5b8f698aceee84947cda + commit: 17a81c07fc1f41fbee651e0ef0ca4b44e537e5b1 arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.event_name == 'release' && github.sha }} @@ -648,7 +648,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-app-docker-ce, altinity-setup-regression - commit: f5ab9d5ef344ae1f4c8d5b8f698aceee84947cda + commit: 17a81c07fc1f41fbee651e0ef0ca4b44e537e5b1 arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.event_name == 'release' && github.sha }} From 38345b01dea7b8e884b73a45b240adab275fa3fd Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 11 Apr 2024 14:45:18 -0700 
Subject: [PATCH 104/111] Update regression.yml --- .github/workflows/regression.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index e788f35ce38c..dfe31ceb3a5c 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -212,7 +212,7 @@ jobs: run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite run: python3 - -u ${{ env.SUITE }}/regression.py + -u alter/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --only "/alter/${{ env.SUITE }}/*" --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" From c27346f6c4b7b75e505fb0844bb96efac6a86c23 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 11 Apr 2024 18:57:46 -0700 Subject: [PATCH 105/111] Update regression.yml --- .github/workflows/regression.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index dfe31ceb3a5c..869d6e17011c 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -185,7 +185,7 @@ jobs: strategy: fail-fast: false matrix: - SUITE: ["replace partition", "attach partition", "move partition"] + ONLY: ["replace partition", "attach partition", "move partition"] needs: [runner_labels_setup] runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} timeout-minutes: ${{ inputs.timeout_minutes }} @@ -199,7 +199,7 @@ jobs: run: | cat >> "$GITHUB_ENV" << 'EOF' REPORTS_PATH=${{ 
runner.temp }}/reports_dir - SUITE=${{ matrix.SUITE }} + SUITE=alter EOF - name: Download json reports uses: actions/download-artifact@v3 @@ -214,7 +214,7 @@ jobs: run: python3 -u alter/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --only "/alter/${{ env.SUITE }}/*" + --only "/alter/${{ matrix.ONLU }}/*" --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" ${{ env.args }} - name: Create and upload logs @@ -223,7 +223,7 @@ jobs: - uses: actions/upload-artifact@v3 if: always() with: - name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts + name: ${{ env.SUITE }}-${{ matrix.ONLY }}-${{ inputs.arch }}-artifacts path: ${{ env.artifact_paths}} Benchmark: From 6f014976109de10b263d3c09c4d69d59981b0875 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Thu, 11 Apr 2024 23:43:08 -0700 Subject: [PATCH 106/111] Update regression.yml --- .github/workflows/regression.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index 869d6e17011c..c7caf3e37621 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -141,7 +141,7 @@ jobs: strategy: fail-fast: false matrix: - SUITE: [aes_encryption, aggregate_functions, alter, atomic_insert, base_58, clickhouse_keeper, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] + SUITE: [aes_encryption, 
aggregate_functions, atomic_insert, base_58, clickhouse_keeper, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] needs: [runner_labels_setup] runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} timeout-minutes: ${{ inputs.timeout_minutes }} @@ -185,7 +185,7 @@ jobs: strategy: fail-fast: false matrix: - ONLY: ["replace partition", "attach partition", "move partition"] + ONLY: [replace, attach, move] needs: [runner_labels_setup] runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} timeout-minutes: ${{ inputs.timeout_minutes }} @@ -214,7 +214,7 @@ jobs: run: python3 -u alter/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --only "/alter/${{ matrix.ONLU }}/*" + --only "/alter/${{ matrix.ONLY }} partition/*" --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" ${{ env.args }} - name: Create and upload logs From 0be705cb24648133e37988c02e7bb4e8490e61ac Mon Sep 17 00:00:00 2001 From: Nikita Taranov Date: Thu, 2 Nov 2023 17:14:15 +0100 Subject: [PATCH 107/111] Fix wrong attribution of untracked memory to a user/query (#56089) * impl * add tests * add tests * fix typo --- src/Common/MemoryTracker.cpp | 23 +++++++--- src/Common/MemoryTracker.h | 5 +- ...02896_memory_accounting_for_user.reference | 0 .../02896_memory_accounting_for_user.sh | 46 +++++++++++++++++++ 4 files changed, 64 insertions(+), 10 deletions(-) create mode 100644 
tests/queries/0_stateless/02896_memory_accounting_for_user.reference create mode 100755 tests/queries/0_stateless/02896_memory_accounting_for_user.sh diff --git a/src/Common/MemoryTracker.cpp b/src/Common/MemoryTracker.cpp index 93bd50a0b498..09f25f5dc70e 100644 --- a/src/Common/MemoryTracker.cpp +++ b/src/Common/MemoryTracker.cpp @@ -1,18 +1,19 @@ #include "MemoryTracker.h" #include -#include -#include -#include #include +#include #include #include -#include -#include -#include #include +#include #include +#include +#include +#include +#include #include +#include #include "config.h" @@ -589,6 +590,16 @@ bool MemoryTracker::isSizeOkForSampling(UInt64 size) const return ((max_allocation_size_bytes == 0 || size <= max_allocation_size_bytes) && size >= min_allocation_size_bytes); } +void MemoryTracker::setParent(MemoryTracker * elem) +{ + /// Untracked memory shouldn't be accounted to a query or a user if it was allocated before the thread was attached + /// to a query thread group or a user group, because this memory will be (🤞) freed outside of these scopes. + if (level == VariableContext::Thread && DB::current_thread) + DB::current_thread->flushUntrackedMemory(); + + parent.store(elem, std::memory_order_relaxed); +} + bool canEnqueueBackgroundTask() { auto limit = background_memory_tracker.getSoftLimit(); diff --git a/src/Common/MemoryTracker.h b/src/Common/MemoryTracker.h index 5041dc2af41f..a17ca421d204 100644 --- a/src/Common/MemoryTracker.h +++ b/src/Common/MemoryTracker.h @@ -196,10 +196,7 @@ class MemoryTracker /// next should be changed only once: from nullptr to some value. 
/// NOTE: It is not true in MergeListElement - void setParent(MemoryTracker * elem) - { - parent.store(elem, std::memory_order_relaxed); - } + void setParent(MemoryTracker * elem); MemoryTracker * getParent() { diff --git a/tests/queries/0_stateless/02896_memory_accounting_for_user.reference b/tests/queries/0_stateless/02896_memory_accounting_for_user.reference new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/tests/queries/0_stateless/02896_memory_accounting_for_user.sh b/tests/queries/0_stateless/02896_memory_accounting_for_user.sh new file mode 100755 index 000000000000..72f4be1475df --- /dev/null +++ b/tests/queries/0_stateless/02896_memory_accounting_for_user.sh @@ -0,0 +1,46 @@ +#!/usr/bin/env bash +# Tags: no-parallel, long + +CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) +# shellcheck source=../shell_config.sh +. "$CUR_DIR"/../shell_config.sh + + +total_iterations=16 +parallelism=32 + +$CLICKHOUSE_CLIENT --query='DROP TABLE IF EXISTS test_inserts' +$CLICKHOUSE_CLIENT --query='CREATE TABLE test_inserts ENGINE=Null AS system.numbers' + +run_query() { + ( $CLICKHOUSE_CLIENT --query='SELECT * FROM numbers_mt(1000000) FORMAT CSV' | $CLICKHOUSE_CLIENT --max_threads 8 --max_memory_usage_for_user 1073741824 -q 'INSERT INTO test_inserts FORMAT CSV' 2>/dev/null ) +} + +for ((i = 1; i <= total_iterations; i++)); do + for ((j = 1; j <= parallelism; j++)); do + run_query & pids+=($!) + done + + EXIT_CODE=0 + new_pids=() + for pid in "${pids[@]:0:parallelism}"; do + CODE=0 + wait "${pid}" || CODE=$? + run_query & new_pids+=($!) + if [[ "${CODE}" != "0" ]]; then + EXIT_CODE=1; + fi + done + for pid in "${pids[@]:parallelism}"; do + CODE=0 + wait "${pid}" || CODE=$? 
+ if [[ "${CODE}" != "0" ]]; then + EXIT_CODE=1; + fi + done + pids=("${new_pids[@]}") + + if [[ $EXIT_CODE -ne 0 ]]; then + exit $EXIT_CODE + fi +done From ec866b562b9869fa2fedb2bf2d68f90b1eab8f23 Mon Sep 17 00:00:00 2001 From: Nikita Taranov Date: Tue, 9 Apr 2024 12:32:38 +0000 Subject: [PATCH 108/111] Merge pull request #62208 from arthurpassos/s3_aws_private_link_style Add support for S3 access through aws private link interface --- src/IO/S3/URI.cpp | 14 +++++++++++--- src/IO/S3/URI.h | 1 + src/IO/tests/gtest_s3_uri.cpp | 34 ++++++++++++++++++++++++++++++++++ 3 files changed, 46 insertions(+), 3 deletions(-) diff --git a/src/IO/S3/URI.cpp b/src/IO/S3/URI.cpp index 34590df53973..2e8d892dd006 100644 --- a/src/IO/S3/URI.cpp +++ b/src/IO/S3/URI.cpp @@ -21,12 +21,17 @@ namespace S3 URI::URI(const std::string & uri_) { /// Case when bucket name represented in domain name of S3 URL. - /// E.g. (https://bucket-name.s3.Region.amazonaws.com/key) + /// E.g. (https://bucket-name.s3.region.amazonaws.com/key) /// https://docs.aws.amazon.com/AmazonS3/latest/dev/VirtualHosting.html#virtual-hosted-style-access static const RE2 virtual_hosted_style_pattern(R"((.+)\.(s3|cos|obs|oss)([.\-][a-z0-9\-.:]+))"); + /// Case when AWS Private Link Interface is being used + /// E.g. (bucket.vpce-07a1cd78f1bd55c5f-j3a3vg6w.s3.us-east-1.vpce.amazonaws.com/bucket-name/key) + /// https://docs.aws.amazon.com/AmazonS3/latest/userguide/privatelink-interface-endpoints.html + static const RE2 aws_private_link_style_pattern(R"(bucket\.vpce\-([a-z0-9\-.]+)\.vpce.amazonaws.com(:\d{1,5})?)"); + /// Case when bucket name and key represented in path of S3 URL. - /// E.g. (https://s3.Region.amazonaws.com/bucket-name/key) + /// E.g. 
(https://s3.region.amazonaws.com/bucket-name/key) /// https://docs.aws.amazon.com/AmazonS3/latest/dev/VirtualHosting.html#path-style-access static const RE2 path_style_pattern("^/([^/]*)/(.*)"); @@ -66,7 +71,10 @@ URI::URI(const std::string & uri_) String name; String endpoint_authority_from_uri; - if (re2::RE2::FullMatch(uri.getAuthority(), virtual_hosted_style_pattern, &bucket, &name, &endpoint_authority_from_uri)) + bool is_using_aws_private_link_interface = re2::RE2::FullMatch(uri.getAuthority(), aws_private_link_style_pattern); + + if (!is_using_aws_private_link_interface + && re2::RE2::FullMatch(uri.getAuthority(), virtual_hosted_style_pattern, &bucket, &name, &endpoint_authority_from_uri)) { is_virtual_hosted_style = true; endpoint = uri.getScheme() + "://" + name + endpoint_authority_from_uri; diff --git a/src/IO/S3/URI.h b/src/IO/S3/URI.h index f8f40cf91086..1b5c85ff2b7d 100644 --- a/src/IO/S3/URI.h +++ b/src/IO/S3/URI.h @@ -17,6 +17,7 @@ namespace DB::S3 * The following patterns are allowed: * s3://bucket/key * http(s)://endpoint/bucket/key + * http(s)://bucket..s3..vpce.amazonaws.com<:port_number>/bucket_name/key */ struct URI { diff --git a/src/IO/tests/gtest_s3_uri.cpp b/src/IO/tests/gtest_s3_uri.cpp index c088e41f1e8a..b04d2e79432a 100644 --- a/src/IO/tests/gtest_s3_uri.cpp +++ b/src/IO/tests/gtest_s3_uri.cpp @@ -74,6 +74,40 @@ const TestCase TestCases[] = { "data", "", true}, + {S3::URI("https://bucket.vpce-07a1cd78f1bd55c5f-j3a3vg6w.s3.us-east-1.vpce.amazonaws.com/root/nested/file.txt"), + "https://bucket.vpce-07a1cd78f1bd55c5f-j3a3vg6w.s3.us-east-1.vpce.amazonaws.com", + "root", + "nested/file.txt", + "", + false}, + // Test with a file with no extension + {S3::URI("https://bucket.vpce-03b2c987f1bd55c5f-j3b4vg7w.s3.ap-southeast-2.vpce.amazonaws.com/some_bucket/document"), + "https://bucket.vpce-03b2c987f1bd55c5f-j3b4vg7w.s3.ap-southeast-2.vpce.amazonaws.com", + "some_bucket", + "document", + "", + false}, + // Test with a deeply nested file path + 
{S3::URI("https://bucket.vpce-0242cd56f1bd55c5f-l5b7vg8x.s3.sa-east-1.vpce.amazonaws.com/some_bucket/b/c/d/e/f/g/h/i/j/data.json"), + "https://bucket.vpce-0242cd56f1bd55c5f-l5b7vg8x.s3.sa-east-1.vpce.amazonaws.com", + "some_bucket", + "b/c/d/e/f/g/h/i/j/data.json", + "", + false}, + // Zonal + {S3::URI("https://bucket.vpce-07a1cd78f1bd55c5f-j3a3vg6w-us-east-1a.s3.us-east-1.vpce.amazonaws.com/root/nested/file.txt"), + "https://bucket.vpce-07a1cd78f1bd55c5f-j3a3vg6w-us-east-1a.s3.us-east-1.vpce.amazonaws.com", + "root", + "nested/file.txt", + "", + false}, + // Non standard port + {S3::URI("https://bucket.vpce-07a1cd78f1bd55c5f-j3a3vg6w-us-east-1a.s3.us-east-1.vpce.amazonaws.com:65535/root/nested/file.txt"), + "https://bucket.vpce-07a1cd78f1bd55c5f-j3a3vg6w-us-east-1a.s3.us-east-1.vpce.amazonaws.com:65535", + "root", + "nested/file.txt", + "", + false}, }; class S3UriTest : public testing::TestWithParam From d9dbeda5955a545f5258fcc3d508ac776320a547 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Wed, 17 Apr 2024 10:39:28 -0700 Subject: [PATCH 109/111] Update reusable_build.yml --- .github/workflows/reusable_build.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index 4204798f9861..1a18adf5478e 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -135,7 +135,13 @@ jobs: with: name: changed_images path: ${{ env.IMAGES_PATH }} - + + - name: Create source tar + run: | + mkdir -p "$TEMP_PATH/build_check/package_release" + cd .. 
&& tar czf $TEMP_PATH/build_source.src.tar.gz ClickHouse/ + cd $TEMP_PATH && tar xvzf $TEMP_PATH/build_source.src.tar.gz + - name: Build run: | cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" From 3a7fb8649ba0fba99971720a6041c1cb9ab5bc3b Mon Sep 17 00:00:00 2001 From: Ilya Golshtein Date: Tue, 23 Apr 2024 13:52:24 +0000 Subject: [PATCH 110/111] kerberized_hadoop dockerfile: Download commons-daemon via https --- docker/test/integration/kerberized_hadoop/Dockerfile | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docker/test/integration/kerberized_hadoop/Dockerfile b/docker/test/integration/kerberized_hadoop/Dockerfile index 592c3e36ef7f..50fe34c31da5 100644 --- a/docker/test/integration/kerberized_hadoop/Dockerfile +++ b/docker/test/integration/kerberized_hadoop/Dockerfile @@ -15,7 +15,10 @@ RUN curl -o krb5-libs-1.10.3-65.el6.x86_64.rpm ftp://ftp.pbone.net/mirror/vault. rm -fr *.rpm RUN cd /tmp && \ - curl http://archive.apache.org/dist/commons/daemon/source/commons-daemon-1.0.15-src.tar.gz -o commons-daemon-1.0.15-src.tar.gz && \ + curl -o wget.rpm ftp://ftp.pbone.net/mirror/vault.centos.org/6.9/os/x86_64/Packages/wget-1.12-10.el6.x86_64.rpm && \ + rpm -i wget.rpm && \ + rm -fr *.rpm && \ + wget --no-check-certificate https://archive.apache.org/dist/commons/daemon/source/commons-daemon-1.0.15-src.tar.gz && \ tar xzf commons-daemon-1.0.15-src.tar.gz && \ cd commons-daemon-1.0.15-src/src/native/unix && \ ./configure && \ From dc4244e8f32d48cdba89cd3b97f1374dfc803df9 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Tue, 13 Feb 2024 16:55:53 +0100 Subject: [PATCH 111/111] Updated message for Altinity's build --- src/Daemon/BaseDaemon.cpp | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/Daemon/BaseDaemon.cpp b/src/Daemon/BaseDaemon.cpp index be323dc67861..e95f2e5f0d73 100644 --- a/src/Daemon/BaseDaemon.cpp +++ b/src/Daemon/BaseDaemon.cpp @@ -492,6 +492,10 @@ class SignalListener : public Poco::Runnable LOG_FATAL(log, 
"ClickHouse version {} is old and should be upgraded to the latest version.", VERSION_STRING); } } + else if constexpr (std::string_view(VERSION_OFFICIAL).contains("altinity build")) + { + LOG_FATAL(log, "You are using an Altinity Stable Build. Please log issues at https://github.com/Altinity/ClickHouse/issues. Thank you!"); + } else { LOG_FATAL(log, "This ClickHouse version is not official and should be upgraded to the official build.");