diff --git a/.github/workflows/basic-tests.yml b/.github/workflows/basic-tests.yml index 22f5d0652c9b..14f0628e454b 100644 --- a/.github/workflows/basic-tests.yml +++ b/.github/workflows/basic-tests.yml @@ -196,10 +196,19 @@ jobs: working-directory: ./clients/python - name: "Install source version of required packages" run: | - breeze release-management prepare-provider-packages fab standard common.sql --package-format \ - wheel --skip-tag-check --version-suffix-for-pypi dev0 - pip install . dist/apache_airflow_providers_fab-*.whl \ - dist/apache_airflow_providers_standard-*.whl dist/apache_airflow_providers_common_sql-*.whl + breeze release-management prepare-provider-packages \ + fab \ + standard \ + common.sql \ + sqlite \ + --package-format wheel \ + --skip-tag-check \ + --version-suffix-for-pypi dev0 + pip install . \ + dist/apache_airflow_providers_fab-*.whl \ + dist/apache_airflow_providers_standard-*.whl \ + dist/apache_airflow_providers_common_sql-*.whl \ + dist/apache_airflow_providers_sqlite-*.whl breeze release-management prepare-task-sdk-package --package-format wheel pip install ./dist/apache_airflow_task_sdk-*.whl - name: "Install Python client" diff --git a/.github/workflows/check-providers.yml b/.github/workflows/check-providers.yml index 3faf19b61f53..a0bf2d316f82 100644 --- a/.github/workflows/check-providers.yml +++ b/.github/workflows/check-providers.yml @@ -40,7 +40,7 @@ on: # yamllint disable-line rule:truthy description: "Whether to upgrade to newer dependencies" required: true type: string - affected-providers-list-as-string: + selected-providers-list-as-string: description: "List of affected providers as string" required: false type: string @@ -54,7 +54,7 @@ on: # yamllint disable-line rule:truthy description: "List of parallel provider test types as string" required: true type: string - skip-provider-tests: + skip-providers-tests: description: "Whether to skip provider tests (true/false)" required: true type: string @@ -163,7 +163,7 @@ jobs: run: > breeze release-management prepare-provider-packages --include-not-ready-providers --version-suffix-for-pypi dev0 --package-format sdist - ${{ inputs.affected-providers-list-as-string }} + ${{ inputs.selected-providers-list-as-string }} - name: "Prepare airflow package: sdist" run: > breeze release-management prepare-airflow-package @@ -187,7 +187,7 @@ jobs: --providers-constraints-location /files/constraints-${{env.PYTHON_MAJOR_MINOR_VERSION}}/constraints-source-providers-${{env.PYTHON_MAJOR_MINOR_VERSION}}.txt --run-in-parallel - if: inputs.affected-providers-list-as-string == '' + if: inputs.selected-providers-list-as-string == '' - name: "Install affected provider packages and airflow via sdist files" run: > breeze release-management install-provider-packages @@ -198,7 +198,7 @@ jobs: --providers-constraints-location /files/constraints-${{env.PYTHON_MAJOR_MINOR_VERSION}}/constraints-source-providers-${{env.PYTHON_MAJOR_MINOR_VERSION}}.txt --run-in-parallel - if: inputs.affected-providers-list-as-string != '' + if: inputs.selected-providers-list-as-string != '' providers-compatibility-checks: timeout-minutes: 80 @@ -218,7 +218,7 @@ jobs: VERSION_SUFFIX_FOR_PYPI: "dev0" VERBOSE: "true" CLEAN_AIRFLOW_INSTALLATION: "${{ inputs.canary-run }}" - if: inputs.skip-provider-tests != 'true' + if: inputs.skip-providers-tests != 'true' steps: - name: "Cleanup repo" shell: bash @@ -268,7 +268,7 @@ jobs: Airflow ${{ matrix.airflow-version }}:Python ${{ matrix.python-version }} if: matrix.run-tests == 'true' run: > - breeze testing 
tests --run-in-parallel + breeze testing providers-tests --run-in-parallel --parallel-test-types "${{ inputs.providers-test-types-list-as-string }}" --use-packages-from-dist --package-format wheel diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9f61e356b3b2..96996a42ee18 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -55,86 +55,89 @@ jobs: env: GITHUB_CONTEXT: ${{ toJson(github) }} outputs: - image-tag: ${{ github.event.pull_request.head.sha || github.sha }} - docker-cache: ${{ steps.selective-checks.outputs.docker-cache }} - disable-airflow-repo-cache: ${{ steps.selective-checks.outputs.disable-airflow-repo-cache }} - affected-providers-list-as-string: >- - ${{ steps.selective-checks.outputs.affected-providers-list-as-string }} - upgrade-to-newer-dependencies: ${{ steps.selective-checks.outputs.upgrade-to-newer-dependencies }} - python-versions: ${{ steps.selective-checks.outputs.python-versions }} - python-versions-list-as-string: ${{ steps.selective-checks.outputs.python-versions-list-as-string }} all-python-versions-list-as-string: >- ${{ steps.selective-checks.outputs.all-python-versions-list-as-string }} - default-python-version: ${{ steps.selective-checks.outputs.default-python-version }} - kubernetes-versions-list-as-string: >- - ${{ steps.selective-checks.outputs.kubernetes-versions-list-as-string }} - kubernetes-combos-list-as-string: >- - ${{ steps.selective-checks.outputs.kubernetes-combos-list-as-string }} - default-kubernetes-version: ${{ steps.selective-checks.outputs.default-kubernetes-version }} - postgres-versions: ${{ steps.selective-checks.outputs.postgres-versions }} - default-postgres-version: ${{ steps.selective-checks.outputs.default-postgres-version }} - mysql-versions: ${{ steps.selective-checks.outputs.mysql-versions }} - default-mysql-version: ${{ steps.selective-checks.outputs.default-mysql-version }} - default-helm-version: ${{ steps.selective-checks.outputs.default-helm-version }} - default-kind-version: ${{ steps.selective-checks.outputs.default-kind-version }} - force-pip: ${{ steps.selective-checks.outputs.force-pip }} - full-tests-needed: ${{ steps.selective-checks.outputs.full-tests-needed }} - parallel-test-types-list-as-string: >- - ${{ steps.selective-checks.outputs.parallel-test-types-list-as-string }} - providers-test-types-list-as-string: >- - ${{ steps.selective-checks.outputs.providers-test-types-list-as-string }} - separate-test-types-list-as-string: >- - ${{ steps.selective-checks.outputs.separate-test-types-list-as-string }} - include-success-outputs: ${{ steps.selective-checks.outputs.include-success-outputs }} - postgres-exclude: ${{ steps.selective-checks.outputs.postgres-exclude }} - mysql-exclude: ${{ steps.selective-checks.outputs.mysql-exclude }} - sqlite-exclude: ${{ steps.selective-checks.outputs.sqlite-exclude }} - skip-provider-tests: ${{ steps.selective-checks.outputs.skip-provider-tests }} - run-tests: ${{ steps.selective-checks.outputs.run-tests }} - run-amazon-tests: ${{ steps.selective-checks.outputs.run-amazon-tests }} - run-ui-tests: ${{ steps.selective-checks.outputs.run-ui-tests }} - run-www-tests: ${{ steps.selective-checks.outputs.run-www-tests }} - run-kubernetes-tests: ${{ steps.selective-checks.outputs.run-kubernetes-tests }} - run-task-sdk-tests: ${{ steps.selective-checks.outputs.run-task-sdk-tests }} basic-checks-only: ${{ steps.selective-checks.outputs.basic-checks-only }} + build-job-description: ${{ steps.source-run-info.outputs.build-job-description }} + 
canary-run: ${{ steps.source-run-info.outputs.canary-run }} + chicken-egg-providers: ${{ steps.selective-checks.outputs.chicken-egg-providers }} ci-image-build: ${{ steps.selective-checks.outputs.ci-image-build }} - prod-image-build: ${{ steps.selective-checks.outputs.prod-image-build }} - docs-build: ${{ steps.selective-checks.outputs.docs-build }} - mypy-checks: ${{ steps.selective-checks.outputs.mypy-checks }} - needs-mypy: ${{ steps.selective-checks.outputs.needs-mypy }} - needs-helm-tests: ${{ steps.selective-checks.outputs.needs-helm-tests }} - needs-api-tests: ${{ steps.selective-checks.outputs.needs-api-tests }} - needs-api-codegen: ${{ steps.selective-checks.outputs.needs-api-codegen }} + core-test-types-list-as-string: >- + ${{ steps.selective-checks.outputs.core-test-types-list-as-string }} + debug-resources: ${{ steps.selective-checks.outputs.debug-resources }} default-branch: ${{ steps.selective-checks.outputs.default-branch }} default-constraints-branch: ${{ steps.selective-checks.outputs.default-constraints-branch }} + default-helm-version: ${{ steps.selective-checks.outputs.default-helm-version }} + default-kind-version: ${{ steps.selective-checks.outputs.default-kind-version }} + default-kubernetes-version: ${{ steps.selective-checks.outputs.default-kubernetes-version }} + default-mysql-version: ${{ steps.selective-checks.outputs.default-mysql-version }} + default-postgres-version: ${{ steps.selective-checks.outputs.default-postgres-version }} + default-python-version: ${{ steps.selective-checks.outputs.default-python-version }} + disable-airflow-repo-cache: ${{ steps.selective-checks.outputs.disable-airflow-repo-cache }} + docker-cache: ${{ steps.selective-checks.outputs.docker-cache }} + docs-build: ${{ steps.selective-checks.outputs.docs-build }} docs-list-as-string: ${{ steps.selective-checks.outputs.docs-list-as-string }} - skip-pre-commits: ${{ steps.selective-checks.outputs.skip-pre-commits }} - providers-compatibility-checks: ${{ steps.selective-checks.outputs.providers-compatibility-checks }} excluded-providers-as-string: ${{ steps.selective-checks.outputs.excluded-providers-as-string }} + force-pip: ${{ steps.selective-checks.outputs.force-pip }} + full-tests-needed: ${{ steps.selective-checks.outputs.full-tests-needed }} + has-migrations: ${{ steps.selective-checks.outputs.has-migrations }} helm-test-packages: ${{ steps.selective-checks.outputs.helm-test-packages }} - debug-resources: ${{ steps.selective-checks.outputs.debug-resources }} - runs-on-as-json-default: ${{ steps.selective-checks.outputs.runs-on-as-json-default }} - runs-on-as-json-docs-build: ${{ steps.selective-checks.outputs.runs-on-as-json-docs-build }} - runs-on-as-json-public: ${{ steps.selective-checks.outputs.runs-on-as-json-public }} - runs-on-as-json-self-hosted: ${{ steps.selective-checks.outputs.runs-on-as-json-self-hosted }} - runs-on-as-json-self-hosted-asf: ${{ steps.selective-checks.outputs.runs-on-as-json-self-hosted-asf }} - is-self-hosted-runner: ${{ steps.selective-checks.outputs.is-self-hosted-runner }} + image-tag: ${{ github.event.pull_request.head.sha || github.sha }} + in-workflow-build: ${{ steps.source-run-info.outputs.in-workflow-build }} + include-success-outputs: ${{ steps.selective-checks.outputs.include-success-outputs }} + individual-providers-test-types-list-as-string: >- + ${{ steps.selective-checks.outputs.individual-providers-test-types-list-as-string }} is-airflow-runner: ${{ steps.selective-checks.outputs.is-airflow-runner }} is-amd-runner: ${{ 
steps.selective-checks.outputs.is-amd-runner }} is-arm-runner: ${{ steps.selective-checks.outputs.is-arm-runner }} - is-vm-runner: ${{ steps.selective-checks.outputs.is-vm-runner }} is-k8s-runner: ${{ steps.selective-checks.outputs.is-k8s-runner }} + is-self-hosted-runner: ${{ steps.selective-checks.outputs.is-self-hosted-runner }} + is-vm-runner: ${{ steps.selective-checks.outputs.is-vm-runner }} + kubernetes-combos-list-as-string: >- + ${{ steps.selective-checks.outputs.kubernetes-combos-list-as-string }} + kubernetes-versions-list-as-string: >- + ${{ steps.selective-checks.outputs.kubernetes-versions-list-as-string }} latest-versions-only: ${{ steps.selective-checks.outputs.latest-versions-only }} - chicken-egg-providers: ${{ steps.selective-checks.outputs.chicken-egg-providers }} - has-migrations: ${{ steps.selective-checks.outputs.has-migrations }} + mypy-checks: ${{ steps.selective-checks.outputs.mypy-checks }} + mysql-exclude: ${{ steps.selective-checks.outputs.mysql-exclude }} + mysql-versions: ${{ steps.selective-checks.outputs.mysql-versions }} + needs-api-codegen: ${{ steps.selective-checks.outputs.needs-api-codegen }} + needs-api-tests: ${{ steps.selective-checks.outputs.needs-api-tests }} + needs-helm-tests: ${{ steps.selective-checks.outputs.needs-helm-tests }} + needs-mypy: ${{ steps.selective-checks.outputs.needs-mypy }} only-new-ui-files: ${{ steps.selective-checks.outputs.only-new-ui-files }} - source-head-repo: ${{ steps.source-run-info.outputs.source-head-repo }} + postgres-exclude: ${{ steps.selective-checks.outputs.postgres-exclude }} + postgres-versions: ${{ steps.selective-checks.outputs.postgres-versions }} + prod-image-build: ${{ steps.selective-checks.outputs.prod-image-build }} + providers-compatibility-checks: ${{ steps.selective-checks.outputs.providers-compatibility-checks }} + providers-test-types-list-as-string: >- + ${{ steps.selective-checks.outputs.providers-test-types-list-as-string }} pull-request-labels: ${{ steps.source-run-info.outputs.pr-labels }} - in-workflow-build: ${{ steps.source-run-info.outputs.in-workflow-build }} - build-job-description: ${{ steps.source-run-info.outputs.build-job-description }} - testable-integrations: ${{ steps.selective-checks.outputs.testable-integrations }} - canary-run: ${{ steps.source-run-info.outputs.canary-run }} + python-versions-list-as-string: ${{ steps.selective-checks.outputs.python-versions-list-as-string }} + python-versions: ${{ steps.selective-checks.outputs.python-versions }} + run-amazon-tests: ${{ steps.selective-checks.outputs.run-amazon-tests }} run-coverage: ${{ steps.source-run-info.outputs.run-coverage }} + run-kubernetes-tests: ${{ steps.selective-checks.outputs.run-kubernetes-tests }} + run-task-sdk-tests: ${{ steps.selective-checks.outputs.run-task-sdk-tests }} + run-system-tests: ${{ steps.selective-checks.outputs.run-system-tests }} + run-tests: ${{ steps.selective-checks.outputs.run-tests }} + run-ui-tests: ${{ steps.selective-checks.outputs.run-ui-tests }} + run-www-tests: ${{ steps.selective-checks.outputs.run-www-tests }} + runs-on-as-json-default: ${{ steps.selective-checks.outputs.runs-on-as-json-default }} + runs-on-as-json-docs-build: ${{ steps.selective-checks.outputs.runs-on-as-json-docs-build }} + runs-on-as-json-public: ${{ steps.selective-checks.outputs.runs-on-as-json-public }} + runs-on-as-json-self-hosted-asf: ${{ steps.selective-checks.outputs.runs-on-as-json-self-hosted-asf }} + runs-on-as-json-self-hosted: ${{ steps.selective-checks.outputs.runs-on-as-json-self-hosted }} + 
selected-providers-list-as-string: >- + ${{ steps.selective-checks.outputs.selected-providers-list-as-string }} + skip-pre-commits: ${{ steps.selective-checks.outputs.skip-pre-commits }} + skip-providers-tests: ${{ steps.selective-checks.outputs.skip-providers-tests }} + source-head-repo: ${{ steps.source-run-info.outputs.source-head-repo }} + sqlite-exclude: ${{ steps.selective-checks.outputs.sqlite-exclude }} + test-groups: ${{ steps.selective-checks.outputs.test-groups }} + testable-core-integrations: ${{ steps.selective-checks.outputs.testable-core-integrations }} + testable-providers-integrations: ${{ steps.selective-checks.outputs.testable-providers-integrations }} + upgrade-to-newer-dependencies: ${{ steps.selective-checks.outputs.upgrade-to-newer-dependencies }} steps: - name: "Cleanup repo" shell: bash @@ -328,7 +331,7 @@ jobs: packages: read secrets: inherit if: > - needs.build-info.outputs.skip-provider-tests != 'true' && + needs.build-info.outputs.skip-providers-tests != 'true' && needs.build-info.outputs.latest-versions-only != 'true' with: runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} @@ -336,9 +339,9 @@ jobs: canary-run: ${{ needs.build-info.outputs.canary-run }} default-python-version: ${{ needs.build-info.outputs.default-python-version }} upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} - affected-providers-list-as-string: ${{ needs.build-info.outputs.affected-providers-list-as-string }} + selected-providers-list-as-string: ${{ needs.build-info.outputs.selected-providers-list-as-string }} providers-compatibility-checks: ${{ needs.build-info.outputs.providers-compatibility-checks }} - skip-provider-tests: ${{ needs.build-info.outputs.skip-provider-tests }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} python-versions: ${{ needs.build-info.outputs.python-versions }} providers-test-types-list-as-string: ${{ needs.build-info.outputs.providers-test-types-list-as-string }} @@ -374,17 +377,19 @@ jobs: backend: "postgres" test-name: "Postgres" test-scope: "DB" + test-groups: ${{ needs.build-info.outputs.test-groups }} image-tag: ${{ needs.build-info.outputs.image-tag }} python-versions: ${{ needs.build-info.outputs.python-versions }} backend-versions: ${{ needs.build-info.outputs.postgres-versions }} excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} excludes: ${{ needs.build-info.outputs.postgres-exclude }} - parallel-test-types-list-as-string: ${{ needs.build-info.outputs.parallel-test-types-list-as-string }} + core-test-types-list-as-string: ${{ needs.build-info.outputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ needs.build-info.outputs.providers-test-types-list-as-string }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} run-migration-tests: "true" run-coverage: ${{ needs.build-info.outputs.run-coverage }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} - if: needs.build-info.outputs.run-tests == 'true' && needs.build-info.outputs.only-new-ui-files != 'true' + if: needs.build-info.outputs.run-tests == 'true' tests-mysql: name: "MySQL tests" @@ -399,17 +404,19 @@ jobs: backend: "mysql" test-name: "MySQL" test-scope: "DB" + test-groups: ${{ needs.build-info.outputs.test-groups }} image-tag: ${{ needs.build-info.outputs.image-tag }} python-versions: ${{ needs.build-info.outputs.python-versions }} backend-versions: ${{ 
needs.build-info.outputs.mysql-versions }} excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} excludes: ${{ needs.build-info.outputs.mysql-exclude }} - parallel-test-types-list-as-string: ${{ needs.build-info.outputs.parallel-test-types-list-as-string }} + core-test-types-list-as-string: ${{ needs.build-info.outputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ needs.build-info.outputs.providers-test-types-list-as-string }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} run-coverage: ${{ needs.build-info.outputs.run-coverage }} run-migration-tests: "true" debug-resources: ${{ needs.build-info.outputs.debug-resources }} - if: needs.build-info.outputs.run-tests == 'true' && needs.build-info.outputs.only-new-ui-files != 'true' + if: needs.build-info.outputs.run-tests == 'true' tests-sqlite: name: "Sqlite tests" @@ -425,18 +432,20 @@ jobs: test-name: "Sqlite" test-name-separator: "" test-scope: "DB" + test-groups: ${{ needs.build-info.outputs.test-groups }} image-tag: ${{ needs.build-info.outputs.image-tag }} python-versions: ${{ needs.build-info.outputs.python-versions }} # No versions for sqlite backend-versions: "['']" excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} excludes: ${{ needs.build-info.outputs.sqlite-exclude }} - parallel-test-types-list-as-string: ${{ needs.build-info.outputs.parallel-test-types-list-as-string }} + core-test-types-list-as-string: ${{ needs.build-info.outputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ needs.build-info.outputs.providers-test-types-list-as-string }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} run-coverage: ${{ needs.build-info.outputs.run-coverage }} run-migration-tests: "true" debug-resources: ${{ needs.build-info.outputs.debug-resources }} - if: needs.build-info.outputs.run-tests == 'true' && needs.build-info.outputs.only-new-ui-files != 'true' + if: needs.build-info.outputs.run-tests == 'true' tests-non-db: name: "Non-DB tests" @@ -452,17 +461,19 @@ jobs: test-name: "" test-name-separator: "" test-scope: "Non-DB" + test-groups: ${{ needs.build-info.outputs.test-groups }} image-tag: ${{ needs.build-info.outputs.image-tag }} python-versions: ${{ needs.build-info.outputs.python-versions }} # No versions for non-db backend-versions: "['']" excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} excludes: ${{ needs.build-info.outputs.sqlite-exclude }} - parallel-test-types-list-as-string: ${{ needs.build-info.outputs.parallel-test-types-list-as-string }} + core-test-types-list-as-string: ${{ needs.build-info.outputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ needs.build-info.outputs.providers-test-types-list-as-string }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} run-coverage: ${{ needs.build-info.outputs.run-coverage }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} - if: needs.build-info.outputs.run-tests == 'true' && needs.build-info.outputs.only-new-ui-files != 'true' + if: needs.build-info.outputs.run-tests == 'true' tests-special: name: "Special tests" @@ -478,9 +489,11 @@ jobs: needs.build-info.outputs.upgrade-to-newer-dependencies != 'false' || needs.build-info.outputs.full-tests-needed == 'true') with: + test-groups: ${{ needs.build-info.outputs.test-groups }} runs-on-as-json-default: ${{ 
needs.build-info.outputs.runs-on-as-json-default }} image-tag: ${{ needs.build-info.outputs.image-tag }} - parallel-test-types-list-as-string: ${{ needs.build-info.outputs.parallel-test-types-list-as-string }} + core-test-types-list-as-string: ${{ needs.build-info.outputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ needs.build-info.outputs.providers-test-types-list-as-string }} run-coverage: ${{ needs.build-info.outputs.run-coverage }} default-python-version: ${{ needs.build-info.outputs.default-python-version }} python-versions: ${{ needs.build-info.outputs.python-versions }} @@ -490,10 +503,10 @@ jobs: upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} - tests-integration: - name: Integration Tests + tests-integration-system: + name: Integration and System Tests needs: [build-info, wait-for-ci-images] - uses: ./.github/workflows/integration-tests.yml + uses: ./.github/workflows/integration-system-tests.yml permissions: contents: read packages: read @@ -501,17 +514,19 @@ jobs: with: runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} image-tag: ${{ needs.build-info.outputs.image-tag }} - testable-integrations: ${{ needs.build-info.outputs.testable-integrations }} + testable-core-integrations: ${{ needs.build-info.outputs.testable-core-integrations }} + testable-providers-integrations: ${{ needs.build-info.outputs.testable-providers-integrations }} + run-system-tests: ${{ needs.build-info.outputs.run-tests }} default-python-version: ${{ needs.build-info.outputs.default-python-version }} default-postgres-version: ${{ needs.build-info.outputs.default-postgres-version }} default-mysql-version: ${{ needs.build-info.outputs.default-mysql-version }} - skip-provider-tests: ${{ needs.build-info.outputs.skip-provider-tests }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} run-coverage: ${{ needs.build-info.outputs.run-coverage }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} if: needs.build-info.outputs.run-tests == 'true' tests-with-lowest-direct-resolution: - name: "Lowest direct dependency resolution tests" + name: "Lowest direct dependency providers tests" needs: [build-info, wait-for-ci-images] uses: ./.github/workflows/run-unit-tests.yml permissions: @@ -525,13 +540,16 @@ jobs: test-name: "LowestDeps-Postgres" force-lowest-dependencies: "true" test-scope: "All" + test-groups: ${{ needs.build-info.outputs.test-groups }} backend: "postgres" image-tag: ${{ needs.build-info.outputs.image-tag }} python-versions: ${{ needs.build-info.outputs.python-versions }} backend-versions: "['${{ needs.build-info.outputs.default-postgres-version }}']" excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} excludes: "[]" - parallel-test-types-list-as-string: ${{ needs.build-info.outputs.separate-test-types-list-as-string }} + core-test-types-list-as-string: ${{ needs.build-info.outputs.core-test-types-list-as-string }} + # yamllint disable rule:line-length + providers-test-types-list-as-string: ${{ needs.build-info.outputs.individual-providers-test-types-list-as-string }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} run-coverage: ${{ needs.build-info.outputs.run-coverage }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} @@ -686,7 +704,7 @@ jobs: - tests-mysql - tests-postgres - tests-non-db - - 
tests-integration + - tests-integration-system uses: ./.github/workflows/finalize-tests.yml with: runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} diff --git a/.github/workflows/helm-tests.yml b/.github/workflows/helm-tests.yml index 8b26769ff4bc..4c1ec1023fc9 100644 --- a/.github/workflows/helm-tests.yml +++ b/.github/workflows/helm-tests.yml @@ -75,7 +75,7 @@ jobs: - name: "Prepare breeze & CI image: ${{inputs.default-python-version}}:${{inputs.image-tag}}" uses: ./.github/actions/prepare_breeze_and_image - name: "Helm Unit Tests: ${{ matrix.helm-test-package }}" - run: breeze testing helm-tests --helm-test-package "${{ matrix.helm-test-package }}" + run: breeze testing helm-tests --test-type "${{ matrix.helm-test-package }}" tests-helm-release: timeout-minutes: 80 diff --git a/.github/workflows/integration-system-tests.yml b/.github/workflows/integration-system-tests.yml new file mode 100644 index 000000000000..7fde2ae96836 --- /dev/null +++ b/.github/workflows/integration-system-tests.yml @@ -0,0 +1,200 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +--- +name: Integration and system tests +on: # yamllint disable-line rule:truthy + workflow_call: + inputs: + runs-on-as-json-public: + description: "The array of labels (in json form) determining public runners." + required: true + type: string + image-tag: + description: "Tag to set for the image" + required: true + type: string + testable-core-integrations: + description: "The list of testable core integrations as JSON array." + required: true + type: string + testable-providers-integrations: + description: "The list of testable providers integrations as JSON array." 
+ required: true + type: string + run-system-tests: + description: "Run system tests (true/false)" + required: true + type: string + default-postgres-version: + description: "Default version of Postgres to use" + required: true + type: string + default-mysql-version: + description: "Default version of MySQL to use" + required: true + type: string + skip-providers-tests: + description: "Skip provider tests (true/false)" + required: true + type: string + run-coverage: + description: "Run coverage (true/false)" + required: true + type: string + default-python-version: + description: "Which version of python should be used by default" + required: true + type: string + debug-resources: + description: "Debug resources (true/false)" + required: true + type: string +jobs: + tests-core-integration: + timeout-minutes: 130 + if: inputs.testable-core-integrations != '[]' + name: "Integration core ${{ matrix.integration }}" + runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} + strategy: + fail-fast: false + matrix: + integration: ${{ fromJSON(inputs.testable-core-integrations) }} + env: + IMAGE_TAG: "${{ inputs.image-tag }}" + BACKEND: "postgres" + BACKEND_VERSION: "${{ inputs.default-postgres-version }}" + PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" + JOB_ID: "integration-core-${{ matrix.integration }}" + SKIP_PROVIDERS_TESTS: "${{ inputs.skip-providers-tests }}" + ENABLE_COVERAGE: "${{ inputs.run-coverage}}" + DEBUG_RESOURCES: "${{ inputs.debug-resources }}" + GITHUB_REPOSITORY: ${{ github.repository }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_USERNAME: ${{ github.actor }} + VERBOSE: "true" + steps: + - name: "Cleanup repo" + shell: bash + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@v4 + with: + persist-credentials: false + - name: "Cleanup docker" + run: ./scripts/ci/cleanup_docker.sh + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }}" + uses: ./.github/actions/prepare_breeze_and_image + - name: "Integration: core ${{ matrix.integration }}" + # yamllint disable rule:line-length + run: ./scripts/ci/testing/run_integration_tests_with_retry.sh core "${{ matrix.integration }}" + - name: "Post Tests success" + uses: ./.github/actions/post_tests_success + with: + codecov-token: ${{ secrets.CODECOV_TOKEN }} + python-version: ${{ inputs.default-python-version }} + - name: "Post Tests failure" + uses: ./.github/actions/post_tests_failure + if: failure() + + tests-providers-integration: + timeout-minutes: 130 + if: inputs.testable-providers-integrations != '[]' && inputs.skip-providers-tests != 'true' + name: "Integration: providers ${{ matrix.integration }}" + runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} + strategy: + fail-fast: false + matrix: + integration: ${{ fromJSON(inputs.testable-providers-integrations) }} + env: + IMAGE_TAG: "${{ inputs.image-tag }}" + BACKEND: "postgres" + BACKEND_VERSION: "${{ inputs.default-postgres-version }}" + PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" + JOB_ID: "integration-providers-${{ matrix.integration }}" + SKIP_PROVIDERS_TESTS: "${{ inputs.skip-providers-tests }}" + ENABLE_COVERAGE: "${{ inputs.run-coverage}}" + DEBUG_RESOURCES: "${{ inputs.debug-resources }}" + GITHUB_REPOSITORY: ${{ github.repository }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_USERNAME: ${{ github.actor }} + VERBOSE: "true" + steps:
name: "Cleanup repo" + shell: bash + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@v4 + with: + persist-credentials: false + - name: "Cleanup docker" + run: ./scripts/ci/cleanup_docker.sh + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }}" + uses: ./.github/actions/prepare_breeze_and_image + - name: "Integration: providers ${{ matrix.integration }}" + run: ./scripts/ci/testing/run_integration_tests_with_retry.sh providers "${{ matrix.integration }}" + - name: "Post Tests success" + uses: ./.github/actions/post_tests_success + with: + codecov-token: ${{ secrets.CODECOV_TOKEN }} + python-version: ${{ inputs.default-python-version }} + - name: "Post Tests failure" + uses: ./.github/actions/post_tests_failure + if: failure() + + tests-system: + timeout-minutes: 130 + if: inputs.run-system-tests == 'true' + name: "System Tests" + runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} + env: + IMAGE_TAG: "${{ inputs.image-tag }}" + BACKEND: "postgres" + BACKEND_VERSION: ${{ inputs.default-postgres-version }}" + PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" + JOB_ID: "system" + SKIP_PROVIDERS_TESTS: "${{ inputs.skip-providers-tests }}" + ENABLE_COVERAGE: "${{ inputs.run-coverage}}" + DEBUG_RESOURCES: "${{ inputs.debug-resources }}" + GITHUB_REPOSITORY: ${{ github.repository }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_USERNAME: ${{ github.actor }} + VERBOSE: "true" + steps: + - name: "Cleanup repo" + shell: bash + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@v4 + with: + persist-credentials: false + - name: "Cleanup docker" + run: ./scripts/ci/cleanup_docker.sh + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }}" + uses: ./.github/actions/prepare_breeze_and_image + - name: "System Tests" + run: > + ./scripts/ci/testing/run_system_tests.sh + tests/system/example_empty.py providers/tests/system/example_empty.py + - name: "Post Tests success" + uses: ./.github/actions/post_tests_success + with: + codecov-token: ${{ secrets.CODECOV_TOKEN }} + python-version: ${{ inputs.default-python-version }} + - name: "Post Tests failure" + uses: ./.github/actions/post_tests_failure + if: failure() diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml deleted file mode 100644 index 530d0f9fc563..000000000000 --- a/.github/workflows/integration-tests.yml +++ /dev/null @@ -1,103 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml deleted file mode 100644 index 530d0f9fc563..000000000000 --- a/.github/workflows/integration-tests.yml +++ /dev/null @@ -1,103 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# ---- -name: Integration tests -on: # yamllint disable-line rule:truthy - workflow_call: - inputs: - runs-on-as-json-public: - description: "The array of labels (in json form) determining public runners." - required: true - type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string - testable-integrations: - description: "The list of testable integrations as JSON array." - required: true - type: string - default-postgres-version: - description: "Default version of Postgres to use" - required: true - type: string - default-mysql-version: - description: "Default version of MySQL to use" - required: true - type: string - skip-provider-tests: - description: "Skip provider tests (true/false)" - required: true - type: string - run-coverage: - description: "Run coverage (true/false)" - required: true - type: string - default-python-version: - description: "Which version of python should be used by default" - required: true - type: string - debug-resources: - description: "Debug resources (true/false)" - required: true - type: string -jobs: - tests-integration: - timeout-minutes: 130 - if: inputs.testable-integrations != '[]' - name: "Integration Tests: ${{ matrix.integration }}" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} - strategy: - fail-fast: false - matrix: - integration: ${{ fromJSON(inputs.testable-integrations) }} - env: - IMAGE_TAG: "${{ inputs.image-tag }}" - BACKEND: "postgres" - BACKEND_VERSION: "${{ inputs.default-postgres-version }}" - PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" - JOB_ID: "integration-${{ matrix.integration }}" - SKIP_PROVIDER_TESTS: "${{ inputs.skip-provider-tests }}" - ENABLE_COVERAGE: "${{ inputs.run-coverage}}" - DEBUG_RESOURCES: "${{ inputs.debug-resources }}" - GITHUB_REPOSITORY: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_USERNAME: ${{ github.actor }} - VERBOSE: "true" - steps: - - name: "Cleanup repo" - shell: bash - run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }}" - uses: ./.github/actions/prepare_breeze_and_image - - name: "Integration Tests: ${{ matrix.integration }}" - run: ./scripts/ci/testing/run_integration_tests_with_retry.sh ${{ matrix.integration }} - - name: "Post Tests success: Integration Tests ${{ matrix.integration }}" - uses: ./.github/actions/post_tests_success - with: - codecov-token: ${{ secrets.CODECOV_TOKEN }} - python-version: ${{ inputs.default-python-version }} - - name: "Post Tests failure: Integration Tests ${{ matrix.integration }}" - uses: ./.github/actions/post_tests_failure - if: failure() diff --git a/.github/workflows/run-unit-tests.yml b/.github/workflows/run-unit-tests.yml index eb3e1a90707f..8b8568083501 100644 --- a/.github/workflows/run-unit-tests.yml +++ b/.github/workflows/run-unit-tests.yml @@ -24,6 +24,10 @@ on: # yamllint disable-line rule:truthy description: "The array of labels (in json form) determining default runner used for the build."
required: true type: string + test-groups: + description: "The json representing list of test groups to run" + required: true + type: string backend: description: "The backend to run the tests on" required: true type: string @@ -61,8 +65,12 @@ on: # yamllint disable-line rule:truthy description: "Excluded combos (stringified JSON array of python-version/backend-version dicts)" required: true type: string - parallel-test-types-list-as-string: - description: "The list of parallel test types to run separated by spaces" + core-test-types-list-as-string: + description: "The list of core test types to run separated by spaces" + required: true + type: string + providers-test-types-list-as-string: + description: "The list of providers test types to run separated by spaces" required: true type: string run-migration-tests: @@ -117,9 +125,9 @@ jobs: tests: timeout-minutes: 120 name: "\ - ${{ inputs.test-scope }}:\ + ${{ inputs.test-scope }}-${{ matrix.test-group }}:\ ${{ inputs.test-name }}${{ inputs.test-name-separator }}${{ matrix.backend-version }}:\ - ${{matrix.python-version}}: ${{ inputs.parallel-test-types-list-as-string }}" + ${{matrix.python-version}}" runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} strategy: fail-fast: false @@ -127,8 +135,8 @@ python-version: "${{fromJSON(inputs.python-versions)}}" backend-version: "${{fromJSON(inputs.backend-versions)}}" exclude: "${{fromJSON(inputs.excludes)}}" + test-group: "${{fromJSON(inputs.test-groups)}}" env: - # yamllint disable rule:line-length AIRFLOW_ENABLE_AIP_44: "${{ inputs.enable-aip-44 }}" BACKEND: "${{ inputs.backend }}" BACKEND_VERSION: "${{ matrix.backend-version }}" @@ -145,9 +153,10 @@ IMAGE_TAG: "${{ inputs.image-tag }}" INCLUDE_SUCCESS_OUTPUTS: ${{ inputs.include-success-outputs }} # yamllint disable rule:line-length - JOB_ID: "${{ inputs.test-scope }}-${{ inputs.test-name }}-${{inputs.backend}}-${{ matrix.backend-version }}-${{ matrix.python-version }}" + JOB_ID: "${{ matrix.test-group }}-${{ inputs.test-scope }}-${{ inputs.test-name }}-${{inputs.backend}}-${{ matrix.backend-version }}-${{ matrix.python-version }}" MOUNT_SOURCES: "skip" - PARALLEL_TEST_TYPES: "${{ inputs.parallel-test-types-list-as-string }}" + # yamllint disable rule:line-length + PARALLEL_TEST_TYPES: ${{ matrix.test-group == 'core' && inputs.core-test-types-list-as-string || inputs.providers-test-types-list-as-string }} PYTHON_MAJOR_MINOR_VERSION: "${{ matrix.python-version }}" UPGRADE_BOTO: "${{ inputs.upgrade-boto }}" AIRFLOW_MONITOR_DELAY_TIME_IN_SECONDS: "${{inputs.monitor-delay-time-in-seconds}}" @@ -165,33 +174,13 @@ - name: "Prepare breeze & CI image: ${{matrix.python-version}}:${{ inputs.image-tag }}" uses: ./.github/actions/prepare_breeze_and_image - name: > - Migration Tests: - ${{ matrix.python-version }}:${{ inputs.parallel-test-types-list-as-string }} + Migration Tests: ${{ matrix.python-version }}:${{ env.PARALLEL_TEST_TYPES }} uses: ./.github/actions/migration_tests - if: inputs.run-migration-tests == 'true' + if: inputs.run-migration-tests == 'true' && matrix.test-group == 'core' - name: > - ${{ inputs.test-scope }} Tests ${{ inputs.test-name }} ${{ matrix.backend-version }} - Py${{ matrix.python-version }}:${{ inputs.parallel-test-types-list-as-string}} - run: | - if [[ "${{ inputs.test-scope }}" == "DB" ]]; then - breeze testing db-tests \ - --parallel-test-types "${{ inputs.parallel-test-types-list-as-string }}" - elif [[ "${{ inputs.test-scope }}" == "Non-DB" ]]; then - breeze testing non-db-tests \ - --parallel-test-types "${{ inputs.parallel-test-types-list-as-string }}" - elif [[ "${{ inputs.test-scope }}" == "All" ]]; then - breeze testing tests --run-in-parallel \ - --parallel-test-types "${{ inputs.parallel-test-types-list-as-string }}" - elif [[ "${{ inputs.test-scope }}" == "Quarantined" ]]; then - breeze testing tests --test-type "All-Quarantined" || true - elif [[ "${{ inputs.test-scope }}" == "ARM collection" ]]; then - breeze testing tests --collect-only --remove-arm-packages - elif [[ "${{ inputs.test-scope }}" == "System" ]]; then - breeze testing tests tests/system/example_empty.py --system core - else - echo "Unknown test scope: ${{ inputs.test-scope }}" - exit 1 - fi + ${{ matrix.test-group}}:${{ inputs.test-scope }} Tests ${{ inputs.test-name }} ${{ matrix.backend-version }} + Py${{ matrix.python-version }}:${{ env.PARALLEL_TEST_TYPES }} + run: ./scripts/ci/testing/run_unit_tests.sh "${{ matrix.test-group }}" "${{ inputs.test-scope }}" - name: "Post Tests success" uses: ./.github/actions/post_tests_success with:
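The new `PARALLEL_TEST_TYPES` line above leans on the GitHub Actions `cond && a || b` idiom, which behaves like a ternary only while `a` is truthy: if the core list were ever an empty string, the expression would silently fall through to the providers list. A minimal Python sketch of the intended selection (function and argument names are illustrative, not Airflow code):

```python
def parallel_test_types(test_group: str, core_types: str, providers_types: str) -> str:
    """Mirror of `matrix.test-group == 'core' && inputs.core-... || inputs.providers-...`."""
    # The workflow expression has no true ternary; this is the behavior it relies on,
    # assuming the core test-type list is never empty.
    return core_types if test_group == "core" else providers_types


# Hypothetical values, for illustration only:
assert parallel_test_types("core", "API Always Core", "Providers[amazon]") == "API Always Core"
assert parallel_test_types("providers", "API Always Core", "Providers[amazon]") == "Providers[amazon]"
```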
"${{ inputs.parallel-test-types-list-as-string }}" - elif [[ "${{ inputs.test-scope }}" == "All" ]]; then - breeze testing tests --run-in-parallel \ - --parallel-test-types "${{ inputs.parallel-test-types-list-as-string }}" - elif [[ "${{ inputs.test-scope }}" == "Quarantined" ]]; then - breeze testing tests --test-type "All-Quarantined" || true - elif [[ "${{ inputs.test-scope }}" == "ARM collection" ]]; then - breeze testing tests --collect-only --remove-arm-packages - elif [[ "${{ inputs.test-scope }}" == "System" ]]; then - breeze testing tests tests/system/example_empty.py --system core - else - echo "Unknown test scope: ${{ inputs.test-scope }}" - exit 1 - fi + ${{ matrix.test-group}}:${{ inputs.test-scope }} Tests ${{ inputs.test-name }} ${{ matrix.backend-version }} + Py${{ matrix.python-version }}:${{ env.PARALLEL_TEST_TYPES }} + run: ./scripts/ci/testing/run_unit_tests.sh "${{ matrix.test-group }}" "${{ inputs.test-scope }}" - name: "Post Tests success" uses: ./.github/actions/post_tests_success with: diff --git a/.github/workflows/special-tests.yml b/.github/workflows/special-tests.yml index 78b4d928f7a9..012b619cba94 100644 --- a/.github/workflows/special-tests.yml +++ b/.github/workflows/special-tests.yml @@ -24,12 +24,20 @@ on: # yamllint disable-line rule:truthy description: "The array of labels (in json form) determining default runner used for the build." required: true type: string + test-groups: + description: "The json representing list of test test groups to run" + required: true + type: string image-tag: description: "Tag to set for the image" required: true type: string - parallel-test-types-list-as-string: - description: "The list of parallel test types to run separated by spaces" + core-test-types-list-as-string: + description: "The list of core test types to run separated by spaces" + required: true + type: string + providers-test-types-list-as-string: + description: "The list of providers test types to run separated by spaces" required: true type: string run-coverage: @@ -77,14 +85,15 @@ jobs: downgrade-sqlalchemy: "true" test-name: "MinSQLAlchemy-Postgres" test-scope: "DB" + test-groups: ${{ needs.build-info.outputs.test-groups }} backend: "postgres" image-tag: ${{ inputs.image-tag }} python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} excludes: "[]" - parallel-test-types-list-as-string: ${{ inputs.parallel-test-types-list-as-string }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + core-test-types-list-as-string: ${{ inputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ inputs.providers-test-types-list-as-string }} run-coverage: ${{ inputs.run-coverage }} debug-resources: ${{ inputs.debug-resources }} @@ -100,13 +109,15 @@ jobs: upgrade-boto: "true" test-name: "LatestBoto-Postgres" test-scope: "All" + test-groups: ${{ needs.build-info.outputs.test-groups }} backend: "postgres" image-tag: ${{ inputs.image-tag }} python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} excludes: "[]" - parallel-test-types-list-as-string: ${{ inputs.parallel-test-types-list-as-string }} + core-test-types-list-as-string: ${{ inputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ 
inputs.providers-test-types-list-as-string }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} run-coverage: ${{ inputs.run-coverage }} debug-resources: ${{ inputs.debug-resources }} @@ -123,13 +134,15 @@ jobs: downgrade-pendulum: "true" test-name: "Pendulum2-Postgres" test-scope: "All" + test-groups: ${{ needs.build-info.outputs.test-groups }} backend: "postgres" image-tag: ${{ inputs.image-tag }} python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} excludes: "[]" - parallel-test-types-list-as-string: ${{ inputs.parallel-test-types-list-as-string }} + core-test-types-list-as-string: ${{ inputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ inputs.providers-test-types-list-as-string }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} run-coverage: ${{ inputs.run-coverage }} debug-resources: ${{ inputs.debug-resources }} @@ -146,13 +159,15 @@ jobs: enable-aip-44: "false" test-name: "InProgressDisabled-Postgres" test-scope: "All" + test-groups: ${{ needs.build-info.outputs.test-groups }} backend: "postgres" image-tag: ${{ inputs.image-tag }} python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} excludes: "[]" - parallel-test-types-list-as-string: ${{ inputs.parallel-test-types-list-as-string }} + core-test-types-list-as-string: ${{ inputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ inputs.providers-test-types-list-as-string }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} run-coverage: ${{ inputs.run-coverage }} debug-resources: ${{ inputs.debug-resources }} @@ -168,13 +183,15 @@ jobs: runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} test-name: "Postgres" test-scope: "Quarantined" + test-groups: ${{ needs.build-info.outputs.test-groups }} backend: "postgres" image-tag: ${{ inputs.image-tag }} python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} excludes: "[]" - parallel-test-types-list-as-string: ${{ inputs.parallel-test-types-list-as-string }} + core-test-types-list-as-string: ${{ inputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ inputs.providers-test-types-list-as-string }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} run-coverage: ${{ inputs.run-coverage }} debug-resources: ${{ inputs.debug-resources }} @@ -190,19 +207,21 @@ jobs: runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} test-name: "Postgres" test-scope: "ARM collection" + test-groups: ${{ needs.build-info.outputs.test-groups }} backend: "postgres" image-tag: ${{ inputs.image-tag }} python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} excludes: "[]" - parallel-test-types-list-as-string: ${{ inputs.parallel-test-types-list-as-string }} + core-test-types-list-as-string: ${{ inputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ inputs.providers-test-types-list-as-string }} 
include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} run-coverage: ${{ inputs.run-coverage }} debug-resources: ${{ inputs.debug-resources }} tests-system: - name: "System test" + name: "System test: ${{ matrix.test-group }}" uses: ./.github/workflows/run-unit-tests.yml permissions: contents: read @@ -212,13 +231,15 @@ jobs: runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} test-name: "SystemTest" test-scope: "System" + test-groups: ${{ needs.build-info.outputs.test-groups }} backend: "postgres" image-tag: ${{ inputs.image-tag }} python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} excludes: "[]" - parallel-test-types-list-as-string: ${{ inputs.parallel-test-types-list-as-string }} + core-test-types-list-as-string: ${{ inputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ inputs.providers-test-types-list-as-string }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} run-coverage: ${{ inputs.run-coverage }} debug-resources: ${{ inputs.debug-resources }} diff --git a/Dockerfile b/Dockerfile index 5ca9949b0213..d9fb1878f116 100644 --- a/Dockerfile +++ b/Dockerfile @@ -890,7 +890,7 @@ function install_airflow() { # Similarly we need _a_ file for task_sdk too mkdir -p ./task_sdk/src/airflow/sdk/ - touch ./task_sdk/src/airflow/sdk/__init__.py + echo '__version__ = "0.0.0dev0"' > ./task_sdk/src/airflow/sdk/__init__.py trap 'rm -f ./providers/src/airflow/providers/__init__.py ./task_sdk/src/airflow/__init__.py 2>/dev/null' EXIT diff --git a/Dockerfile.ci b/Dockerfile.ci index 943270aec693..2328688f0fa3 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -660,7 +660,7 @@ function install_airflow() { # Similarly we need _a_ file for task_sdk too mkdir -p ./task_sdk/src/airflow/sdk/ - touch ./task_sdk/src/airflow/sdk/__init__.py + echo '__version__ = "0.0.0dev0"' > ./task_sdk/src/airflow/sdk/__init__.py trap 'rm -f ./providers/src/airflow/providers/__init__.py ./task_sdk/src/airflow/__init__.py 2>/dev/null' EXIT @@ -1068,12 +1068,6 @@ function check_run_tests() { python "${IN_CONTAINER_DIR}/remove_arm_packages.py" fi - if [[ ${TEST_TYPE} == "PlainAsserts" ]]; then - # Plain asserts should be converted to env variable to make sure they are taken into account - # otherwise they will not be effective during test collection when plain assert is breaking collection - export PYTEST_PLAIN_ASSERTS="true" - fi - if [[ ${DATABASE_ISOLATION=} == "true" ]]; then echo "${COLOR_BLUE}Starting internal API server:${COLOR_RESET}" # We need to start the internal API server before running tests @@ -1101,7 +1095,7 @@ function check_run_tests() { fi fi - if [[ ${RUN_SYSTEM_TESTS:="false"} == "true" ]]; then + if [[ ${TEST_GROUP:=""} == "system" ]]; then exec "${IN_CONTAINER_DIR}/run_system_tests.sh" "${@}" else exec "${IN_CONTAINER_DIR}/run_ci_tests.sh" "${@}" diff --git a/airflow/api_connexion/endpoints/dag_run_endpoint.py b/airflow/api_connexion/endpoints/dag_run_endpoint.py index 6a38eb27ff45..0c0a0322e628 100644 --- a/airflow/api_connexion/endpoints/dag_run_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_run_endpoint.py @@ -403,6 +403,7 @@ def update_dag_run_state(*, dag_id: str, dag_run_id: str, session: Session = NEW return dagrun_schema.dump(dag_run) +@mark_fastapi_migration_done @security.requires_access_dag("PUT", DagAccessEntity.RUN) @action_logging @provide_session 
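The Dockerfile and Dockerfile.ci hunks above stop `touch`-ing an empty Task SDK `__init__.py` and write a `__version__` stub instead. A plausible reason, stated as an assumption since the diff doesn't show Airflow's build backend: tooling that resolves a dynamic package version by parsing that file fails on an empty one. A hedged sketch of that style of lookup:

```python
import ast
from pathlib import Path


def read_version(init_py: Path) -> str:
    """Return the `__version__` string assigned in a package __init__.py."""
    # Parse the file and look for a top-level `__version__ = "..."` assignment,
    # the way version-from-file build plugins typically do.
    for node in ast.parse(init_py.read_text()).body:
        if isinstance(node, ast.Assign):
            for target in node.targets:
                if isinstance(target, ast.Name) and target.id == "__version__":
                    return ast.literal_eval(node.value)
    raise ValueError(f"no __version__ found in {init_py}")


# An empty stub (the old `touch`) raises; the new stub yields "0.0.0dev0".
```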
diff --git a/airflow/api_fastapi/core_api/datamodels/assets.py b/airflow/api_fastapi/core_api/datamodels/assets.py index 9ac4528964e6..95084afa5e91 100644 --- a/airflow/api_fastapi/core_api/datamodels/assets.py +++ b/airflow/api_fastapi/core_api/datamodels/assets.py @@ -23,7 +23,7 @@ class DagScheduleAssetReference(BaseModel): - """Serializable version of the DagScheduleAssetReference ORM SqlAlchemyModel.""" + """DAG schedule reference serializer for assets.""" dag_id: str created_at: datetime @@ -31,7 +31,7 @@ class DagScheduleAssetReference(BaseModel): class TaskOutletAssetReference(BaseModel): - """Serializable version of the TaskOutletAssetReference ORM SqlAlchemyModel.""" + """Task outlet reference serializer for assets.""" dag_id: str task_id: str @@ -40,7 +40,7 @@ class TaskOutletAssetReference(BaseModel): class AssetAliasSchema(BaseModel): - """Serializable version of the AssetAliasSchema ORM SqlAlchemyModel.""" + """Asset alias serializer for assets.""" id: int name: str diff --git a/airflow/api_fastapi/core_api/datamodels/dag_run.py b/airflow/api_fastapi/core_api/datamodels/dag_run.py index 102567f69976..8241885aff2f 100644 --- a/airflow/api_fastapi/core_api/datamodels/dag_run.py +++ b/airflow/api_fastapi/core_api/datamodels/dag_run.py @@ -41,6 +41,12 @@ class DAGRunPatchBody(BaseModel): note: str | None = Field(None, max_length=1000) +class DAGRunClearBody(BaseModel): + """DAG Run serializer for clear endpoint body.""" + + dry_run: bool = True + + class DAGRunResponse(BaseModel): """DAG Run serializer for responses.""" diff --git a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml index 6d592e9d1756..f3d30e88ee87 100644 --- a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml @@ -1215,6 +1215,65 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /public/dags/{dag_id}/dagRuns/{dag_run_id}/clear: + post: + tags: + - DagRun + summary: Clear Dag Run + operationId: clear_dag_run + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DAGRunClearBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + anyOf: + - $ref: '#/components/schemas/TaskInstanceCollectionResponse' + - $ref: '#/components/schemas/DAGRunResponse' + title: Response Clear Dag Run + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' /public/dagSources/{file_token}: get: tags: @@ -2257,31 +2316,6 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/monitor/health: - get: - tags: - - Monitor - summary: Get Health - operationId: get_health - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: 
'#/components/schemas/HealthInfoSchema' - '401': - description: Unauthorized - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - '403': - description: Forbidden - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' /public/plugins/: get: tags: @@ -3570,32 +3604,6 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/version/: - get: - tags: - - Version - summary: Get Version - description: Get version information. - operationId: get_version - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/VersionInfo' - '401': - description: Unauthorized - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - '403': - description: Forbidden - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' /public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}: get: tags: @@ -3690,6 +3698,33 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /public/monitor/health: + get: + tags: + - Monitor + summary: Get Health + operationId: get_health + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HealthInfoSchema' + /public/version/: + get: + tags: + - Version + summary: Get Version + description: Get version information. + operationId: get_version + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/VersionInfo' components: schemas: AppBuilderMenuItemResponse: @@ -3752,7 +3787,7 @@ components: - id - name title: AssetAliasSchema - description: Serializable version of the AssetAliasSchema ORM SqlAlchemyModel. + description: Asset alias serializer for assets. AssetCollectionResponse: properties: assets: @@ -4509,6 +4544,15 @@ components: - file_token title: DAGResponse description: DAG serializer for responses. + DAGRunClearBody: + properties: + dry_run: + type: boolean + title: Dry Run + default: true + type: object + title: DAGRunClearBody + description: DAG Run serializer for clear endpoint body. DAGRunPatchBody: properties: state: @@ -5002,7 +5046,7 @@ components: - created_at - updated_at title: DagScheduleAssetReference - description: Serializable version of the DagScheduleAssetReference ORM SqlAlchemyModel. + description: DAG schedule reference serializer for assets. DagStatsCollectionResponse: properties: dags: @@ -5937,7 +5981,7 @@ components: - created_at - updated_at title: TaskOutletAssetReference - description: Serializable version of the TaskOutletAssetReference ORM SqlAlchemyModel. + description: Task outlet reference serializer for assets. 
      TaskResponse:
        properties:
          task_id:
diff --git a/airflow/api_fastapi/core_api/routes/public/__init__.py b/airflow/api_fastapi/core_api/routes/public/__init__.py
index 71de864fa3ee..e85d17ae4ca8 100644
--- a/airflow/api_fastapi/core_api/routes/public/__init__.py
+++ b/airflow/api_fastapi/core_api/routes/public/__init__.py
@@ -41,28 +41,34 @@
 from airflow.api_fastapi.core_api.routes.public.version import version_router
 from airflow.api_fastapi.core_api.routes.public.xcom import xcom_router

-public_router = AirflowRouter(
-    prefix="/public",
+public_router = AirflowRouter(prefix="/public")
+
+# Router carrying the attributes common to all authenticated routes
+authenticated_router = AirflowRouter(
     responses=create_openapi_http_exception_doc([status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]),
 )
+authenticated_router.include_router(assets_router)
+authenticated_router.include_router(backfills_router)
+authenticated_router.include_router(connections_router)
+authenticated_router.include_router(dag_run_router)
+authenticated_router.include_router(dag_sources_router)
+authenticated_router.include_router(dag_stats_router)
+authenticated_router.include_router(dag_warning_router)
+authenticated_router.include_router(dags_router)
+authenticated_router.include_router(event_logs_router)
+authenticated_router.include_router(import_error_router)
+authenticated_router.include_router(plugins_router)
+authenticated_router.include_router(pools_router)
+authenticated_router.include_router(providers_router)
+authenticated_router.include_router(task_instances_router)
+authenticated_router.include_router(tasks_router)
+authenticated_router.include_router(variables_router)
+authenticated_router.include_router(xcom_router)
+
+# Include the authenticated router in the public router
+public_router.include_router(authenticated_router)

-public_router.include_router(assets_router)
-public_router.include_router(backfills_router)
-public_router.include_router(connections_router)
-public_router.include_router(dag_run_router)
-public_router.include_router(dag_sources_router)
-public_router.include_router(dag_stats_router)
-public_router.include_router(dag_warning_router)
-public_router.include_router(dags_router)
-public_router.include_router(event_logs_router)
-public_router.include_router(import_error_router)
+# The following routers are not included in the authenticated router; for now we don't expect them to require authentication
 public_router.include_router(monitor_router)
-public_router.include_router(plugins_router)
-public_router.include_router(pools_router)
-public_router.include_router(providers_router)
-public_router.include_router(task_instances_router)
-public_router.include_router(tasks_router)
-public_router.include_router(variables_router)
 public_router.include_router(version_router)
-public_router.include_router(xcom_router)
diff --git a/airflow/api_fastapi/core_api/routes/public/dag_run.py b/airflow/api_fastapi/core_api/routes/public/dag_run.py
index 810896806eea..d95cf76f69ae 100644
--- a/airflow/api_fastapi/core_api/routes/public/dag_run.py
+++ b/airflow/api_fastapi/core_api/routes/public/dag_run.py
@@ -31,10 +31,15 @@
 from airflow.api_fastapi.common.db.common import get_session
 from airflow.api_fastapi.common.router import AirflowRouter
 from airflow.api_fastapi.core_api.datamodels.dag_run import (
+    DAGRunClearBody,
     DAGRunPatchBody,
     DAGRunPatchStates,
     DAGRunResponse,
 )
+from airflow.api_fastapi.core_api.datamodels.task_instances import (
+    TaskInstanceCollectionResponse,
+    TaskInstanceResponse,
+)
 from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc
 from airflow.models import DAG, DagRun

@@ -142,3 +147,53 @@ def patch_dag_run(
     dag_run = session.get(DagRun, dag_run.id)
     return DAGRunResponse.model_validate(dag_run, from_attributes=True)
+
+
+@dag_run_router.post(
+    "/{dag_run_id}/clear", responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND])
+)
+def clear_dag_run(
+    dag_id: str,
+    dag_run_id: str,
+    body: DAGRunClearBody,
+    request: Request,
+    session: Annotated[Session, Depends(get_session)],
+) -> TaskInstanceCollectionResponse | DAGRunResponse:
+    dag_run = session.scalar(select(DagRun).filter_by(dag_id=dag_id, run_id=dag_run_id))
+    if dag_run is None:
+        raise HTTPException(
+            404, f"The DagRun with dag_id: `{dag_id}` and run_id: `{dag_run_id}` was not found"
+        )
+
+    dag: DAG = request.app.state.dag_bag.get_dag(dag_id)
+    # Use the run's logical date in both branches so the dry-run preview matches
+    # what a real clear will touch.
+    start_date = dag_run.logical_date
+    end_date = dag_run.logical_date
+
+    if body.dry_run:
+        task_instances = dag.clear(
+            start_date=start_date,
+            end_date=end_date,
+            task_ids=None,
+            only_failed=False,
+            dry_run=True,
+            session=session,
+        )
+
+        return TaskInstanceCollectionResponse(
+            task_instances=[
+                TaskInstanceResponse.model_validate(ti, from_attributes=True) for ti in task_instances
+            ],
+            total_entries=len(task_instances),
+        )
+    else:
+        dag.clear(
+            start_date=start_date,
+            end_date=end_date,
+            task_ids=None,
+            only_failed=False,
+            session=session,
+        )
+        dag_run_cleared = session.scalar(select(DagRun).where(DagRun.id == dag_run.id))
+        return DAGRunResponse.model_validate(dag_run_cleared, from_attributes=True)
diff --git a/airflow/config_templates/config.yml b/airflow/config_templates/config.yml
index c77f9476b0d2..eba9f7b8c70e 100644
--- a/airflow/config_templates/config.yml
+++ b/airflow/config_templates/config.yml
@@ -1064,24 +1064,6 @@ metrics:
      example: "\"scheduler,executor,dagrun,pool,triggerer,celery\" or \"^scheduler,^executor,heartbeat|timeout\""
      default: ""
-    # TODO: Remove 'timer_unit_consistency' in Airflow 3.0
-    timer_unit_consistency:
-      description: |
-        Controls the consistency of timer units across all metrics loggers
-        (e.g., Statsd, Datadog, OpenTelemetry)
-        for timing and duration-based metrics. When enabled, all timers will publish
-        metrics in milliseconds for consistency and alignment with Airflow's default
-        metrics behavior in version 3.0+.
-
-        .. warning::
-
-          It will be the default behavior from Airflow 3.0. If disabled, timers may publish
-          in seconds for backwards compatibility, though it is recommended to enable this
-          setting to ensure metric uniformity and forward-compat with Airflow 3.
-      version_added: 2.11.0
-      type: string
-      example: ~
-      default: "False"
     statsd_on:
       description: |
         Enables sending metrics to StatsD.
diff --git a/airflow/metrics/datadog_logger.py b/airflow/metrics/datadog_logger.py
index 81926716eb25..a166c6fcb169 100644
--- a/airflow/metrics/datadog_logger.py
+++ b/airflow/metrics/datadog_logger.py
@@ -19,11 +19,9 @@
 import datetime
 import logging
-import warnings
 from typing import TYPE_CHECKING

 from airflow.configuration import conf
-from airflow.exceptions import RemovedInAirflow3Warning
 from airflow.metrics.protocols import Timer
 from airflow.metrics.validators import (
     PatternAllowListValidator,
@@ -42,14 +40,6 @@
 log = logging.getLogger(__name__)

-timer_unit_consistency = conf.getboolean("metrics", "timer_unit_consistency")
-if not timer_unit_consistency:
-    warnings.warn(
-        "Timer and timing metrics publish in seconds were deprecated. It is enabled by default from Airflow 3 onwards. Enable timer_unit_consistency to publish all the timer and timing metrics in milliseconds.",
-        RemovedInAirflow3Warning,
-        stacklevel=2,
-    )
-

 class SafeDogStatsdLogger:
     """DogStatsd Logger."""

@@ -144,10 +134,7 @@ def timing(
             tags_list = []
         if self.metrics_validator.test(stat):
             if isinstance(dt, datetime.timedelta):
-                if timer_unit_consistency:
-                    dt = dt.total_seconds() * 1000.0
-                else:
-                    dt = dt.total_seconds()
+                dt = dt.total_seconds() * 1000.0
             return self.dogstatsd.timing(metric=stat, value=dt, tags=tags_list)
         return None
diff --git a/airflow/metrics/otel_logger.py b/airflow/metrics/otel_logger.py
index ed123608626f..c3633212cd27 100644
--- a/airflow/metrics/otel_logger.py
+++ b/airflow/metrics/otel_logger.py
@@ -31,7 +31,6 @@
 from opentelemetry.sdk.resources import HOST_NAME, SERVICE_NAME, Resource

 from airflow.configuration import conf
-from airflow.exceptions import RemovedInAirflow3Warning
 from airflow.metrics.protocols import Timer
 from airflow.metrics.validators import (
     OTEL_NAME_MAX_LENGTH,
@@ -73,14 +72,6 @@
 # Delimiter is placed between the universal metric prefix and the unique metric name.
 DEFAULT_METRIC_NAME_DELIMITER = "."

-timer_unit_consistency = conf.getboolean("metrics", "timer_unit_consistency")
-if not timer_unit_consistency:
-    warnings.warn(
-        "Timer and timing metrics publish in seconds were deprecated. It is enabled by default from Airflow 3 onwards. Enable timer_unit_consistency to publish all the timer and timing metrics in milliseconds.",
-        RemovedInAirflow3Warning,
-        stacklevel=2,
-    )
-

 def full_name(name: str, *, prefix: str = DEFAULT_METRIC_NAME_PREFIX) -> str:
     """Assembles the prefix, delimiter, and name and returns it as a string."""

@@ -284,10 +275,7 @@ def timing(
-        """OTel does not have a native timer, stored as a Gauge whose value is number of seconds elapsed."""
+        """OTel does not have a native timer; the value is stored as a Gauge of milliseconds elapsed."""
         if self.metrics_validator.test(stat) and name_is_otel_safe(self.prefix, stat):
             if isinstance(dt, datetime.timedelta):
-                if timer_unit_consistency:
-                    dt = dt.total_seconds() * 1000.0
-                else:
-                    dt = dt.total_seconds()
+                dt = dt.total_seconds() * 1000.0
             self.metrics_map.set_gauge_value(full_name(prefix=self.prefix, name=stat), float(dt), False, tags)

     def timer(
diff --git a/airflow/metrics/protocols.py b/airflow/metrics/protocols.py
index 0d12704e87a3..8cfe4d8e7ea3 100644
--- a/airflow/metrics/protocols.py
+++ b/airflow/metrics/protocols.py
@@ -19,23 +19,12 @@
 import datetime
 import time
-import warnings
 from typing import Union

-from airflow.configuration import conf
-from airflow.exceptions import RemovedInAirflow3Warning
 from airflow.typing_compat import Protocol

 DeltaType = Union[int, float, datetime.timedelta]

-timer_unit_consistency = conf.getboolean("metrics", "timer_unit_consistency")
-if not timer_unit_consistency:
-    warnings.warn(
-        "Timer and timing metrics publish in seconds were deprecated. It is enabled by default from Airflow 3 onwards. Enable timer_unit_consistency to publish all the timer and timing metrics in milliseconds.",
-        RemovedInAirflow3Warning,
-        stacklevel=2,
-    )
-

 class TimerProtocol(Protocol):
     """Type protocol for StatsLogger.timer."""

@@ -127,9 +116,6 @@ def start(self) -> Timer:
     def stop(self, send: bool = True) -> None:
         """Stop the timer, and optionally send it to stats backend."""
         if self._start_time is not None:
-            if timer_unit_consistency:
-                self.duration = 1000.0 * (time.perf_counter() - self._start_time)  # Convert to milliseconds.
-            else:
-                self.duration = time.perf_counter() - self._start_time
+            self.duration = 1000.0 * (time.perf_counter() - self._start_time)  # Convert to milliseconds.
             if send and self.real_timer:
                 self.real_timer.stop()
diff --git a/airflow/models/abstractoperator.py b/airflow/models/abstractoperator.py
index feafb0b6b637..58c2aec6fdeb 100644
--- a/airflow/models/abstractoperator.py
+++ b/airflow/models/abstractoperator.py
@@ -38,7 +38,7 @@
 from airflow.utils.state import State, TaskInstanceState
 from airflow.utils.task_group import MappedTaskGroup
 from airflow.utils.trigger_rule import TriggerRule
-from airflow.utils.weight_rule import WeightRule
+from airflow.utils.weight_rule import WeightRule, db_safe_priority

 if TYPE_CHECKING:
     from collections.abc import Mapping
@@ -335,7 +335,7 @@ def priority_weight_total(self) -> int:
             )
         if isinstance(self.weight_rule, _AbsolutePriorityWeightStrategy):
-            return self.priority_weight
+            return db_safe_priority(self.priority_weight)
         elif isinstance(self.weight_rule, _DownstreamPriorityWeightStrategy):
             upstream = False
         elif isinstance(self.weight_rule, _UpstreamPriorityWeightStrategy):
@@ -344,10 +344,13 @@ def priority_weight_total(self) -> int:
             upstream = False
         dag = self.get_dag()
         if dag is None:
-            return self.priority_weight
-        return self.priority_weight + sum(
-            dag.task_dict[task_id].priority_weight
-            for task_id in self.get_flat_relative_ids(upstream=upstream)
+            return db_safe_priority(self.priority_weight)
+        return db_safe_priority(
+            self.priority_weight
+            + sum(
+                dag.task_dict[task_id].priority_weight
+                for task_id in self.get_flat_relative_ids(upstream=upstream)
+            )
         )

     @cached_property
diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py
index c1448ef9cc55..fa26a2026f5d 100644
--- a/airflow/models/baseoperator.py
+++ b/airflow/models/baseoperator.py
@@ -473,6 +473,8 @@ class derived from this one results in the creation of a task object,
         This allows the executor to trigger higher priority tasks before
         others when things get backed up. Set priority_weight as a higher
         number for more important tasks.
+        As not all database engines support 64-bit integers, values are capped to the
+        32-bit signed integer range: -2,147,483,648 to 2,147,483,647.
     :param weight_rule: weighting method used for the effective total
         priority weight of the task. Options are:
         ``{ downstream | upstream | absolute }`` default is ``downstream``
@@ -494,7 +496,8 @@ class derived from this one results in the creation of a task object,
         Additionally, when set to ``absolute``, there is bonus effect of
         significantly speeding up the task creation process as for very large
         DAGs. Options can be set as string or using the constants defined in
-        the static class ``airflow.utils.WeightRule``
+        the static class ``airflow.utils.WeightRule``.
+        Irrespective of the weight rule, the resulting priority values are capped to the 32-bit signed integer range.
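+
+        A minimal sketch of the capping behaviour (assuming ``db_safe_priority``
+        clamps values to the 32-bit signed range, as described above):
+
+        .. code-block:: python
+
+            from airflow.utils.weight_rule import db_safe_priority
+
+            db_safe_priority(10)  # 10, already within range
+            db_safe_priority(2**40)  # clamped to 2,147,483,647
+            db_safe_priority(-(2**40))  # clamped to -2,147,483,648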
|experimental| Since 2.9.0, Airflow allows to define custom priority weight strategy, by creating a subclass of diff --git a/airflow/models/dag.py b/airflow/models/dag.py index e48ec0a9a9c5..8ca80d019e02 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -1404,6 +1404,40 @@ def set_task_group_state( return altered + @overload + def clear( + self, + *, + dry_run: Literal[True], + task_ids: Collection[str | tuple[str, int]] | None = None, + start_date: datetime | None = None, + end_date: datetime | None = None, + only_failed: bool = False, + only_running: bool = False, + confirm_prompt: bool = False, + dag_run_state: DagRunState = DagRunState.QUEUED, + session: Session = NEW_SESSION, + dag_bag: DagBag | None = None, + exclude_task_ids: frozenset[str] | frozenset[tuple[str, int]] | None = frozenset(), + ) -> list[TaskInstance]: ... # pragma: no cover + + @overload + def clear( + self, + *, + task_ids: Collection[str | tuple[str, int]] | None = None, + start_date: datetime | None = None, + end_date: datetime | None = None, + only_failed: bool = False, + only_running: bool = False, + confirm_prompt: bool = False, + dag_run_state: DagRunState = DagRunState.QUEUED, + dry_run: Literal[False] = False, + session: Session = NEW_SESSION, + dag_bag: DagBag | None = None, + exclude_task_ids: frozenset[str] | frozenset[tuple[str, int]] | None = frozenset(), + ) -> int: ... # pragma: no cover + @provide_session def clear( self, @@ -1418,7 +1452,7 @@ def clear( session: Session = NEW_SESSION, dag_bag: DagBag | None = None, exclude_task_ids: frozenset[str] | frozenset[tuple[str, int]] | None = frozenset(), - ) -> int | Iterable[TaskInstance]: + ) -> int | list[TaskInstance]: """ Clear a set of task instances associated with the current dag for a specified date range. diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index 410cdd8773d3..30ef941ceea5 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -26,7 +26,6 @@ import operator import os import signal -import warnings from collections import defaultdict from contextlib import nullcontext from datetime import timedelta @@ -85,7 +84,6 @@ AirflowSkipException, AirflowTaskTerminated, AirflowTaskTimeout, - RemovedInAirflow3Warning, TaskDeferralError, TaskDeferred, UnmappableXComLengthPushed, @@ -176,14 +174,6 @@ PAST_DEPENDS_MET = "past_depends_met" -timer_unit_consistency = conf.getboolean("metrics", "timer_unit_consistency") -if not timer_unit_consistency: - warnings.warn( - "Timer and timing metrics publish in seconds were deprecated. It is enabled by default from Airflow 3 onwards. 
Enable timer_unit_consistency to publish all the timer and timing metrics in milliseconds.", - RemovedInAirflow3Warning, - stacklevel=2, - ) - class TaskReturnCode(Enum): """ @@ -2831,10 +2821,7 @@ def emit_state_change_metric(self, new_state: TaskInstanceState) -> None: self.task_id, ) return - if timer_unit_consistency: - timing = timezone.utcnow() - self.queued_dttm - else: - timing = (timezone.utcnow() - self.queued_dttm).total_seconds() + timing = timezone.utcnow() - self.queued_dttm elif new_state == TaskInstanceState.QUEUED: metric_name = "scheduled_duration" if self.start_date is None: @@ -2847,10 +2834,7 @@ def emit_state_change_metric(self, new_state: TaskInstanceState) -> None: self.task_id, ) return - if timer_unit_consistency: - timing = timezone.utcnow() - self.start_date - else: - timing = (timezone.utcnow() - self.start_date).total_seconds() + timing = timezone.utcnow() - self.start_date else: raise NotImplementedError("no metric emission setup for state %s", new_state) diff --git a/airflow/settings.py b/airflow/settings.py index a3f99510adba..c3f32fa59d98 100644 --- a/airflow/settings.py +++ b/airflow/settings.py @@ -31,6 +31,7 @@ import pluggy from packaging.version import Version from sqlalchemy import create_engine, exc, text +from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, create_async_engine from sqlalchemy.orm import scoped_session, sessionmaker from sqlalchemy.pool import NullPool @@ -95,8 +96,17 @@ DONOT_MODIFY_HANDLERS: bool | None = None DAGS_FOLDER: str = os.path.expanduser(conf.get_mandatory_value("core", "DAGS_FOLDER")) +AIO_LIBS_MAPPING = {"sqlite": "aiosqlite", "postgresql": "asyncpg", "mysql": "aiomysql"} +""" +Mapping of sync scheme to async scheme. + +:meta private: +""" + engine: Engine Session: Callable[..., SASession] +async_engine: AsyncEngine +create_async_session: Callable[..., AsyncSession] # The JSON library to use for DAG Serialization and De-Serialization json = json @@ -199,13 +209,25 @@ def load_policy_plugins(pm: pluggy.PluginManager): pm.load_setuptools_entrypoints("airflow.policy") +def _get_async_conn_uri_from_sync(sync_uri): + scheme, rest = sync_uri.split(":", maxsplit=1) + scheme = scheme.split("+", maxsplit=1)[0] + aiolib = AIO_LIBS_MAPPING.get(scheme) + if aiolib: + return f"{scheme}+{aiolib}:{rest}" + else: + return sync_uri + + def configure_vars(): """Configure Global Variables from airflow.cfg.""" global SQL_ALCHEMY_CONN + global SQL_ALCHEMY_CONN_ASYNC global DAGS_FOLDER global PLUGINS_FOLDER global DONOT_MODIFY_HANDLERS SQL_ALCHEMY_CONN = conf.get("database", "SQL_ALCHEMY_CONN") + SQL_ALCHEMY_CONN_ASYNC = _get_async_conn_uri_from_sync(sync_uri=SQL_ALCHEMY_CONN) DAGS_FOLDER = os.path.expanduser(conf.get("core", "DAGS_FOLDER")) @@ -441,6 +463,9 @@ def configure_orm(disable_connection_pool=False, pool_class=None): global Session global engine + global async_engine + global create_async_session + if os.environ.get("_AIRFLOW_SKIP_DB_TESTS") == "true": # Skip DB initialization in unit tests, if DB tests are skipped Session = SkipDBTestsSession @@ -466,7 +491,14 @@ def configure_orm(disable_connection_pool=False, pool_class=None): connect_args["check_same_thread"] = False engine = create_engine(SQL_ALCHEMY_CONN, connect_args=connect_args, **engine_args, future=True) - + async_engine = create_async_engine(SQL_ALCHEMY_CONN_ASYNC, future=True) + create_async_session = sessionmaker( + bind=async_engine, + autocommit=False, + autoflush=False, + class_=AsyncSession, + expire_on_commit=False, + ) mask_secret(engine.url.password) 
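+    # The async engine above connects through SQL_ALCHEMY_CONN_ASYNC, derived by
+    # _get_async_conn_uri_from_sync. A rough sketch of the mapping for typical
+    # (hypothetical) URIs; schemes without an entry in AIO_LIBS_MAPPING pass through unchanged:
+    #   postgresql://user:pass@host/db     -> postgresql+asyncpg://user:pass@host/db
+    #   mysql+pymysql://user:pass@host/db  -> mysql+aiomysql://user:pass@host/db
+    #   sqlite:////tmp/airflow.db          -> sqlite+aiosqlite:////tmp/airflow.db
+    #   mssql+pyodbc://user:pass@host/db   -> mssql+pyodbc://user:pass@host/db (no async driver mapped)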
setup_event_handlers(engine) diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts index b4d6b6e060e3..605431551880 100644 --- a/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow/ui/openapi-gen/queries/common.ts @@ -644,18 +644,6 @@ export const UseImportErrorServiceGetImportErrorsKeyFn = ( useImportErrorServiceGetImportErrorsKey, ...(queryKey ?? [{ limit, offset, orderBy }]), ]; -export type MonitorServiceGetHealthDefaultResponse = Awaited< - ReturnType ->; -export type MonitorServiceGetHealthQueryResult< - TData = MonitorServiceGetHealthDefaultResponse, - TError = unknown, -> = UseQueryResult; -export const useMonitorServiceGetHealthKey = "MonitorServiceGetHealth"; -export const UseMonitorServiceGetHealthKeyFn = (queryKey?: Array) => [ - useMonitorServiceGetHealthKey, - ...(queryKey ?? []), -]; export type PluginServiceGetPluginsDefaultResponse = Awaited< ReturnType >; @@ -1044,18 +1032,6 @@ export const UseVariableServiceGetVariablesKeyFn = ( useVariableServiceGetVariablesKey, ...(queryKey ?? [{ limit, offset, orderBy }]), ]; -export type VersionServiceGetVersionDefaultResponse = Awaited< - ReturnType ->; -export type VersionServiceGetVersionQueryResult< - TData = VersionServiceGetVersionDefaultResponse, - TError = unknown, -> = UseQueryResult; -export const useVersionServiceGetVersionKey = "VersionServiceGetVersion"; -export const UseVersionServiceGetVersionKeyFn = (queryKey?: Array) => [ - useVersionServiceGetVersionKey, - ...(queryKey ?? []), -]; export type XcomServiceGetXcomEntryDefaultResponse = Awaited< ReturnType >; @@ -1089,6 +1065,30 @@ export const UseXcomServiceGetXcomEntryKeyFn = ( { dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }, ]), ]; +export type MonitorServiceGetHealthDefaultResponse = Awaited< + ReturnType +>; +export type MonitorServiceGetHealthQueryResult< + TData = MonitorServiceGetHealthDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useMonitorServiceGetHealthKey = "MonitorServiceGetHealth"; +export const UseMonitorServiceGetHealthKeyFn = (queryKey?: Array) => [ + useMonitorServiceGetHealthKey, + ...(queryKey ?? []), +]; +export type VersionServiceGetVersionDefaultResponse = Awaited< + ReturnType +>; +export type VersionServiceGetVersionQueryResult< + TData = VersionServiceGetVersionDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useVersionServiceGetVersionKey = "VersionServiceGetVersion"; +export const UseVersionServiceGetVersionKeyFn = (queryKey?: Array) => [ + useVersionServiceGetVersionKey, + ...(queryKey ?? 
[]), +]; export type BackfillServiceCreateBackfillMutationResult = Awaited< ReturnType >; @@ -1098,6 +1098,9 @@ export type ConnectionServicePostConnectionMutationResult = Awaited< export type ConnectionServiceTestConnectionMutationResult = Awaited< ReturnType >; +export type DagRunServiceClearDagRunMutationResult = Awaited< + ReturnType +>; export type PoolServicePostPoolMutationResult = Awaited< ReturnType >; diff --git a/airflow/ui/openapi-gen/queries/prefetch.ts b/airflow/ui/openapi-gen/queries/prefetch.ts index 6e60c36e4f36..644ff61c0077 100644 --- a/airflow/ui/openapi-gen/queries/prefetch.ts +++ b/airflow/ui/openapi-gen/queries/prefetch.ts @@ -833,16 +833,6 @@ export const prefetchUseImportErrorServiceGetImportErrors = ( queryFn: () => ImportErrorService.getImportErrors({ limit, offset, orderBy }), }); -/** - * Get Health - * @returns HealthInfoSchema Successful Response - * @throws ApiError - */ -export const prefetchUseMonitorServiceGetHealth = (queryClient: QueryClient) => - queryClient.prefetchQuery({ - queryKey: Common.UseMonitorServiceGetHealthKeyFn(), - queryFn: () => MonitorService.getHealth(), - }); /** * Get Plugins * @param data The data for the request. @@ -1396,17 +1386,6 @@ export const prefetchUseVariableServiceGetVariables = ( }), queryFn: () => VariableService.getVariables({ limit, offset, orderBy }), }); -/** - * Get Version - * Get version information. - * @returns VersionInfo Successful Response - * @throws ApiError - */ -export const prefetchUseVersionServiceGetVersion = (queryClient: QueryClient) => - queryClient.prefetchQuery({ - queryKey: Common.UseVersionServiceGetVersionKeyFn(), - queryFn: () => VersionService.getVersion(), - }); /** * Get Xcom Entry * Get an XCom entry. @@ -1462,3 +1441,24 @@ export const prefetchUseXcomServiceGetXcomEntry = ( xcomKey, }), }); +/** + * Get Health + * @returns HealthInfoSchema Successful Response + * @throws ApiError + */ +export const prefetchUseMonitorServiceGetHealth = (queryClient: QueryClient) => + queryClient.prefetchQuery({ + queryKey: Common.UseMonitorServiceGetHealthKeyFn(), + queryFn: () => MonitorService.getHealth(), + }); +/** + * Get Version + * Get version information. 
+ * @returns VersionInfo Successful Response + * @throws ApiError + */ +export const prefetchUseVersionServiceGetVersion = (queryClient: QueryClient) => + queryClient.prefetchQuery({ + queryKey: Common.UseVersionServiceGetVersionKeyFn(), + queryFn: () => VersionService.getVersion(), + }); diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts index 50c9fd6b4c74..5be5cc180e07 100644 --- a/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow/ui/openapi-gen/queries/queries.ts @@ -33,6 +33,7 @@ import { BackfillPostBody, ConnectionBody, DAGPatchBody, + DAGRunClearBody, DAGRunPatchBody, DagRunState, DagWarningType, @@ -1024,24 +1025,6 @@ export const useImportErrorServiceGetImportErrors = < ImportErrorService.getImportErrors({ limit, offset, orderBy }) as TData, ...options, }); -/** - * Get Health - * @returns HealthInfoSchema Successful Response - * @throws ApiError - */ -export const useMonitorServiceGetHealth = < - TData = Common.MonitorServiceGetHealthDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseMonitorServiceGetHealthKeyFn(queryKey), - queryFn: () => MonitorService.getHealth() as TData, - ...options, - }); /** * Get Plugins * @param data The data for the request. @@ -1684,25 +1667,6 @@ export const useVariableServiceGetVariables = < VariableService.getVariables({ limit, offset, orderBy }) as TData, ...options, }); -/** - * Get Version - * Get version information. - * @returns VersionInfo Successful Response - * @throws ApiError - */ -export const useVersionServiceGetVersion = < - TData = Common.VersionServiceGetVersionDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseVersionServiceGetVersionKeyFn(queryKey), - queryFn: () => VersionService.getVersion() as TData, - ...options, - }); /** * Get Xcom Entry * Get an XCom entry. @@ -1759,6 +1723,43 @@ export const useXcomServiceGetXcomEntry = < }) as TData, ...options, }); +/** + * Get Health + * @returns HealthInfoSchema Successful Response + * @throws ApiError + */ +export const useMonitorServiceGetHealth = < + TData = Common.MonitorServiceGetHealthDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseMonitorServiceGetHealthKeyFn(queryKey), + queryFn: () => MonitorService.getHealth() as TData, + ...options, + }); +/** + * Get Version + * Get version information. + * @returns VersionInfo Successful Response + * @throws ApiError + */ +export const useVersionServiceGetVersion = < + TData = Common.VersionServiceGetVersionDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseVersionServiceGetVersionKeyFn(queryKey), + queryFn: () => VersionService.getVersion() as TData, + ...options, + }); /** * Create Backfill * @param data The data for the request. @@ -1879,6 +1880,52 @@ export const useConnectionServiceTestConnection = < }) as unknown as Promise, ...options, }); +/** + * Clear Dag Run + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.requestBody + * @returns unknown Successful Response + * @throws ApiError + */ +export const useDagRunServiceClearDagRun = < + TData = Common.DagRunServiceClearDagRunMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + dagRunId: string; + requestBody: DAGRunClearBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + dagRunId: string; + requestBody: DAGRunClearBody; + }, + TContext + >({ + mutationFn: ({ dagId, dagRunId, requestBody }) => + DagRunService.clearDagRun({ + dagId, + dagRunId, + requestBody, + }) as unknown as Promise, + ...options, + }); /** * Post Pool * Create a Pool. diff --git a/airflow/ui/openapi-gen/queries/suspense.ts b/airflow/ui/openapi-gen/queries/suspense.ts index cbfddd5e4a92..d23daf02b92a 100644 --- a/airflow/ui/openapi-gen/queries/suspense.ts +++ b/airflow/ui/openapi-gen/queries/suspense.ts @@ -1009,24 +1009,6 @@ export const useImportErrorServiceGetImportErrorsSuspense = < ImportErrorService.getImportErrors({ limit, offset, orderBy }) as TData, ...options, }); -/** - * Get Health - * @returns HealthInfoSchema Successful Response - * @throws ApiError - */ -export const useMonitorServiceGetHealthSuspense = < - TData = Common.MonitorServiceGetHealthDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseMonitorServiceGetHealthKeyFn(queryKey), - queryFn: () => MonitorService.getHealth() as TData, - ...options, - }); /** * Get Plugins * @param data The data for the request. @@ -1669,25 +1651,6 @@ export const useVariableServiceGetVariablesSuspense = < VariableService.getVariables({ limit, offset, orderBy }) as TData, ...options, }); -/** - * Get Version - * Get version information. - * @returns VersionInfo Successful Response - * @throws ApiError - */ -export const useVersionServiceGetVersionSuspense = < - TData = Common.VersionServiceGetVersionDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseVersionServiceGetVersionKeyFn(queryKey), - queryFn: () => VersionService.getVersion() as TData, - ...options, - }); /** * Get Xcom Entry * Get an XCom entry. @@ -1744,3 +1707,40 @@ export const useXcomServiceGetXcomEntrySuspense = < }) as TData, ...options, }); +/** + * Get Health + * @returns HealthInfoSchema Successful Response + * @throws ApiError + */ +export const useMonitorServiceGetHealthSuspense = < + TData = Common.MonitorServiceGetHealthDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseMonitorServiceGetHealthKeyFn(queryKey), + queryFn: () => MonitorService.getHealth() as TData, + ...options, + }); +/** + * Get Version + * Get version information. 
+ * @returns VersionInfo Successful Response + * @throws ApiError + */ +export const useVersionServiceGetVersionSuspense = < + TData = Common.VersionServiceGetVersionDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseVersionServiceGetVersionKeyFn(queryKey), + queryFn: () => VersionService.getVersion() as TData, + ...options, + }); diff --git a/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow/ui/openapi-gen/requests/schemas.gen.ts index 6db4ec1e1b7d..cae722f2f2b9 100644 --- a/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -103,8 +103,7 @@ export const $AssetAliasSchema = { type: "object", required: ["id", "name"], title: "AssetAliasSchema", - description: - "Serializable version of the AssetAliasSchema ORM SqlAlchemyModel.", + description: "Asset alias serializer for assets.", } as const; export const $AssetCollectionResponse = { @@ -1304,6 +1303,19 @@ export const $DAGResponse = { description: "DAG serializer for responses.", } as const; +export const $DAGRunClearBody = { + properties: { + dry_run: { + type: "boolean", + title: "Dry Run", + default: true, + }, + }, + type: "object", + title: "DAGRunClearBody", + description: "DAG Run serializer for clear endpoint body.", +} as const; + export const $DAGRunPatchBody = { properties: { state: { @@ -2012,8 +2024,7 @@ export const $DagScheduleAssetReference = { type: "object", required: ["dag_id", "created_at", "updated_at"], title: "DagScheduleAssetReference", - description: - "Serializable version of the DagScheduleAssetReference ORM SqlAlchemyModel.", + description: "DAG schedule reference serializer for assets.", } as const; export const $DagStatsCollectionResponse = { @@ -3369,8 +3380,7 @@ export const $TaskOutletAssetReference = { type: "object", required: ["dag_id", "task_id", "created_at", "updated_at"], title: "TaskOutletAssetReference", - description: - "Serializable version of the TaskOutletAssetReference ORM SqlAlchemyModel.", + description: "Task outlet reference serializer for assets.", } as const; export const $TaskResponse = { diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts index 94ce934ca70a..edc28bfe1c78 100644 --- a/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow/ui/openapi-gen/requests/services.gen.ts @@ -47,6 +47,8 @@ import type { DeleteDagRunResponse, PatchDagRunData, PatchDagRunResponse, + ClearDagRunData, + ClearDagRunResponse, GetDagSourceData, GetDagSourceResponse, GetDagStatsData, @@ -77,7 +79,6 @@ import type { GetImportErrorResponse, GetImportErrorsData, GetImportErrorsResponse, - GetHealthResponse, GetPluginsData, GetPluginsResponse, DeletePoolData, @@ -116,9 +117,10 @@ import type { GetVariablesResponse, PostVariableData, PostVariableResponse, - GetVersionResponse, GetXcomEntryData, GetXcomEntryResponse, + GetHealthResponse, + GetVersionResponse, } from "./types.gen"; export class AssetService { @@ -769,6 +771,36 @@ export class DagRunService { }, }); } + + /** + * Clear Dag Run + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.requestBody + * @returns unknown Successful Response + * @throws ApiError + */ + public static clearDagRun( + data: ClearDagRunData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "POST", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/clear", + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + }, + body: data.requestBody, + mediaType: "application/json", + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } } export class DagSourceService { @@ -1269,24 +1301,6 @@ export class ImportErrorService { } } -export class MonitorService { - /** - * Get Health - * @returns HealthInfoSchema Successful Response - * @throws ApiError - */ - public static getHealth(): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/public/monitor/health", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - }, - }); - } -} - export class PluginService { /** * Get Plugins @@ -1918,25 +1932,6 @@ export class VariableService { } } -export class VersionService { - /** - * Get Version - * Get version information. - * @returns VersionInfo Successful Response - * @throws ApiError - */ - public static getVersion(): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/public/version/", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - }, - }); - } -} - export class XcomService { /** * Get Xcom Entry @@ -1979,3 +1974,32 @@ export class XcomService { }); } } + +export class MonitorService { + /** + * Get Health + * @returns HealthInfoSchema Successful Response + * @throws ApiError + */ + public static getHealth(): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/monitor/health", + }); + } +} + +export class VersionService { + /** + * Get Version + * Get version information. + * @returns VersionInfo Successful Response + * @throws ApiError + */ + public static getVersion(): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/version/", + }); + } +} diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index 340feb1b4862..646e4086906e 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -22,7 +22,7 @@ export type AppBuilderViewResponse = { }; /** - * Serializable version of the AssetAliasSchema ORM SqlAlchemyModel. + * Asset alias serializer for assets. */ export type AssetAliasSchema = { id: number; @@ -273,6 +273,13 @@ export type DAGResponse = { readonly file_token: string; }; +/** + * DAG Run serializer for clear endpoint body. + */ +export type DAGRunClearBody = { + dry_run?: boolean; +}; + /** * DAG Run Serializer for PATCH requests. */ @@ -457,7 +464,7 @@ export type DagRunType = | "asset_triggered"; /** - * Serializable version of the DagScheduleAssetReference ORM SqlAlchemyModel. + * DAG schedule reference serializer for assets. */ export type DagScheduleAssetReference = { dag_id: string; @@ -826,7 +833,7 @@ export type TaskInstanceStateCount = { }; /** - * Serializable version of the TaskOutletAssetReference ORM SqlAlchemyModel. + * Task outlet reference serializer for assets. 
*/ export type TaskOutletAssetReference = { dag_id: string; @@ -1138,6 +1145,16 @@ export type PatchDagRunData = { export type PatchDagRunResponse = DAGRunResponse; +export type ClearDagRunData = { + dagId: string; + dagRunId: string; + requestBody: DAGRunClearBody; +}; + +export type ClearDagRunResponse = + | TaskInstanceCollectionResponse + | DAGRunResponse; + export type GetDagSourceData = { accept?: string; fileToken: string; @@ -1272,8 +1289,6 @@ export type GetImportErrorsData = { export type GetImportErrorsResponse = ImportErrorCollectionResponse; -export type GetHealthResponse = HealthInfoSchema; - export type GetPluginsData = { limit?: number; offset?: number; @@ -1449,8 +1464,6 @@ export type PostVariableData = { export type PostVariableResponse = VariableResponse; -export type GetVersionResponse = VersionInfo; - export type GetXcomEntryData = { dagId: string; dagRunId: string; @@ -1463,6 +1476,10 @@ export type GetXcomEntryData = { export type GetXcomEntryResponse = XComResponseNative | XComResponseString; +export type GetHealthResponse = HealthInfoSchema; + +export type GetVersionResponse = VersionInfo; + export type $OpenApiTs = { "/ui/next_run_assets/{dag_id}": { get: { @@ -2036,6 +2053,33 @@ export type $OpenApiTs = { }; }; }; + "/public/dags/{dag_id}/dagRuns/{dag_run_id}/clear": { + post: { + req: ClearDagRunData; + res: { + /** + * Successful Response + */ + 200: TaskInstanceCollectionResponse | DAGRunResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; "/public/dagSources/{file_token}": { get: { req: GetDagSourceData; @@ -2447,24 +2491,6 @@ export type $OpenApiTs = { }; }; }; - "/public/monitor/health": { - get: { - res: { - /** - * Successful Response - */ - 200: HealthInfoSchema; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - }; - }; - }; "/public/plugins/": { get: { req: GetPluginsData; @@ -2962,24 +2988,6 @@ export type $OpenApiTs = { }; }; }; - "/public/version/": { - get: { - res: { - /** - * Successful Response - */ - 200: VersionInfo; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - }; - }; - }; "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}": { get: { req: GetXcomEntryData; @@ -3011,4 +3019,24 @@ export type $OpenApiTs = { }; }; }; + "/public/monitor/health": { + get: { + res: { + /** + * Successful Response + */ + 200: HealthInfoSchema; + }; + }; + }; + "/public/version/": { + get: { + res: { + /** + * Successful Response + */ + 200: VersionInfo; + }; + }; + }; }; diff --git a/airflow/ui/src/components/DagRunInfo.tsx b/airflow/ui/src/components/DagRunInfo.tsx index 4cc2f7027370..0d30e9c7667c 100644 --- a/airflow/ui/src/components/DagRunInfo.tsx +++ b/airflow/ui/src/components/DagRunInfo.tsx @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { VStack, Text, Box, HStack } from "@chakra-ui/react"; +import { VStack, Text, HStack } from "@chakra-ui/react"; import dayjs from "dayjs"; import type { DAGRunResponse } from "openapi/requests/types.gen"; @@ -24,6 +24,8 @@ import Time from "src/components/Time"; import { Tooltip } from "src/components/ui"; import { stateColor } from "src/utils/stateColor"; +import { StateCircle } from "./StateCircle"; + type Props = { readonly dataIntervalEnd?: string | null; readonly dataIntervalStart?: string | null; @@ -81,13 +83,7 @@ const DagRunInfo = ({