From 32d4b63c5993d1dfb9b66040e2d214880da1fbf8 Mon Sep 17 00:00:00 2001 From: Kalyan R Date: Sat, 2 Nov 2024 01:51:55 +0530 Subject: [PATCH 001/137] add min versions to types- tabulate,termcolor,toml (#43570) --- hatch_build.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/hatch_build.py b/hatch_build.py index 7bc7cf2ccd1d..a362d7a5fcad 100644 --- a/hatch_build.py +++ b/hatch_build.py @@ -230,9 +230,9 @@ # hence, 2.31.0.6 is required for aiobotocore>=2.9.0 "types-requests>=2.31.0.6", "types-setuptools>=69.5.0.20240423", - "types-tabulate", - "types-termcolor", - "types-toml", + "types-tabulate>=0.9.0.20240106", + "types-termcolor>=1.1.6.2", + "types-toml>=0.10.8.20240310", ], "devel-sentry": [ "blinker>=1.7.0", From 1f7a58a5172bfd6dacf09d446f48e2cd5edca3d5 Mon Sep 17 00:00:00 2001 From: Kunal Bhattacharya Date: Sat, 2 Nov 2024 02:00:03 +0530 Subject: [PATCH 002/137] Add copy_object functionality for wasbhook (#43037) * Add copy_object functionality for wasbhook * Add test for copy_blobs * Static check fixes * Pytest fix * Update test * Static check fixes --- .../providers/microsoft/azure/hooks/wasb.py | 27 +++++++++++++++++++ .../tests/microsoft/azure/hooks/test_wasb.py | 27 +++++++++++++++++++ 2 files changed, 54 insertions(+) diff --git a/providers/src/airflow/providers/microsoft/azure/hooks/wasb.py b/providers/src/airflow/providers/microsoft/azure/hooks/wasb.py index 36cafd9933bd..014be1903c14 100644 --- a/providers/src/airflow/providers/microsoft/azure/hooks/wasb.py +++ b/providers/src/airflow/providers/microsoft/azure/hooks/wasb.py @@ -493,6 +493,33 @@ def delete_blobs(self, container_name: str, *blobs, **kwargs) -> None: self._get_container_client(container_name).delete_blobs(*blobs, **kwargs) self.log.info("Deleted blobs: %s", blobs) + def copy_blobs( + self, + source_container_name: str, + source_blob_name: str, + destination_container_name: str, + destination_blob_name: str, + ) -> None: + """ + Copy the specified blobs from one blob prefix to another. + + :param source_container_name: The name of the source container containing the blobs. + :param source_blob_name: The full source blob path without the container name. + :param destination_container_name: The name of the destination container where the blobs + will be copied to. + :param destination_blob_name: The full destination blob path without the container name. + """ + source_blob_client = self._get_blob_client( + container_name=source_container_name, blob_name=source_blob_name + ) + source_blob_url = source_blob_client.url + + destination_blob_client = self._get_blob_client( + container_name=destination_container_name, blob_name=destination_blob_name + ) + + destination_blob_client.start_copy_from_url(source_blob_url) + def delete_file( self, container_name: str, diff --git a/providers/tests/microsoft/azure/hooks/test_wasb.py b/providers/tests/microsoft/azure/hooks/test_wasb.py index 39c4b7505250..b696b38485e3 100644 --- a/providers/tests/microsoft/azure/hooks/test_wasb.py +++ b/providers/tests/microsoft/azure/hooks/test_wasb.py @@ -580,6 +580,33 @@ def test_delete_more_than_256_blobs(self, mock_check, mock_get_blobslist, mock_d # `ContainerClient.delete_blobs()` in this test. 
assert mock_delete_blobs.call_count == 2 + @mock.patch.object(WasbHook, "_get_blob_client") + def test_copy_blobs(self, mock_get_blob_client): + # Arrange + hook = WasbHook(wasb_conn_id=self.azure_shared_key_test) + source_container_name = "source-container" + source_blob_name = "source-blob" + destination_container_name = "destination-container" + destination_blob_name = "destination-blob" + + # Mock the blob clients + mock_source_blob_client = mock.MagicMock() + mock_destination_blob_client = mock.MagicMock() + mock_get_blob_client.side_effect = [mock_source_blob_client, mock_destination_blob_client] + + # Mock the URL of the source blob + mock_source_blob_client.url = "https://source-url" + + hook.copy_blobs( + source_container_name, source_blob_name, destination_container_name, destination_blob_name + ) + + mock_get_blob_client.assert_any_call(container_name=source_container_name, blob_name=source_blob_name) + mock_get_blob_client.assert_any_call( + container_name=destination_container_name, blob_name=destination_blob_name + ) + mock_destination_blob_client.start_copy_from_url.assert_called_once_with("https://source-url") + @mock.patch.object(WasbHook, "get_blobs_list") @mock.patch.object(WasbHook, "check_for_blob") def test_delete_nonexisting_blob_fails(self, mock_check, mock_getblobs, mocked_blob_service_client): From c6d6ca2161bfc4e732962c3649bba8ae221b6760 Mon Sep 17 00:00:00 2001 From: Niko Oliveira Date: Fri, 1 Nov 2024 13:50:32 -0700 Subject: [PATCH 003/137] More executor cleanup to remove Plugin support (#43598) --- airflow/executors/executor_constants.py | 1 - airflow/executors/executor_loader.py | 11 +++++------ airflow/executors/executor_utils.py | 11 +---------- tests/executors/test_executor_loader.py | 4 ++-- 4 files changed, 8 insertions(+), 19 deletions(-) diff --git a/airflow/executors/executor_constants.py b/airflow/executors/executor_constants.py index 4e4923beb477..65d814f28ac8 100644 --- a/airflow/executors/executor_constants.py +++ b/airflow/executors/executor_constants.py @@ -24,7 +24,6 @@ class ConnectorSource(Enum): """Enum of supported executor import sources.""" CORE = "core" - PLUGIN = "plugin" CUSTOM_PATH = "custom path" diff --git a/airflow/executors/executor_loader.py b/airflow/executors/executor_loader.py index f74153f95fc9..7fc0bd63e980 100644 --- a/airflow/executors/executor_loader.py +++ b/airflow/executors/executor_loader.py @@ -94,7 +94,7 @@ def _get_executor_names(cls) -> list[ExecutorName]: # paths won't be provided by the user in that case. if core_executor_module := cls.executors.get(name): executor_names.append(ExecutorName(alias=name, module_path=core_executor_module)) - # Only a module path or plugin name was provided + # A module path was provided else: executor_names.append(ExecutorName(alias=None, module_path=name)) # An alias was provided with the module path @@ -104,12 +104,12 @@ def _get_executor_names(cls) -> list[ExecutorName]: # (e.g. my_local_exec_alias:LocalExecutor). Allowing this makes things unnecessarily # complicated. Multiple Executors of the same type will be supported by a future multitenancy # AIP. - # The module component should always be a module or plugin path. + # The module component should always be a module path. module_path = split_name[1] if not module_path or module_path in CORE_EXECUTOR_NAMES or "." not in module_path: raise AirflowConfigException( "Incorrectly formatted executor configuration. 
Second portion of an executor " - f"configuration must be a module path or plugin but received: {module_path}" + f"configuration must be a module path but received: {module_path}" ) else: executor_names.append(ExecutorName(alias=split_name[0], module_path=split_name[1])) @@ -117,7 +117,7 @@ def _get_executor_names(cls) -> list[ExecutorName]: raise AirflowConfigException(f"Incorrectly formatted executor configuration: {name}") # As of now, we do not allow duplicate executors. - # Add all module paths/plugin names to a set, since the actual code is what is unique + # Add all module paths to a set, since the actual code is what is unique unique_modules = set([exec_name.module_path for exec_name in executor_names]) if len(unique_modules) < len(executor_names): msg = ( @@ -216,7 +216,6 @@ def load_executor(cls, executor_name: ExecutorName | str | None) -> BaseExecutor This supports the following formats: * by executor name for core executor - * by ``{plugin_name}.{class_name}`` for executor from plugins * by import path * by class name of the Executor * by ExecutorName object specification @@ -271,7 +270,7 @@ def import_executor_cls( Supports the same formats as ExecutorLoader.load_executor. - :param executor_name: Name of core executor or module path to provider provided as a plugin. + :param executor_name: Name of core executor or module path to executor. :param validate: Whether or not to validate the executor before returning :return: executor class via executor_name and executor import source diff --git a/airflow/executors/executor_utils.py b/airflow/executors/executor_utils.py index 8b67e96e4e53..016e01d8d0c3 100644 --- a/airflow/executors/executor_utils.py +++ b/airflow/executors/executor_utils.py @@ -31,17 +31,8 @@ def __init__(self, module_path, alias=None): def set_connector_source(self): if self.alias in CORE_EXECUTOR_NAMES: self.connector_source = ConnectorSource.CORE - # If there is only one dot, then this is likely a plugin. This is the best we can do - # to determine. - elif self.module_path.count(".") == 1: - self.log.debug( - "The executor name looks like the plugin path (executor_name=%s) due to having " - "just two period delimited parts. 
Treating executor as a plugin", - self.module_path, - ) - self.connector_source = ConnectorSource.PLUGIN - # Executor must be a module else: + # Executor must be a module self.connector_source = ConnectorSource.CUSTOM_PATH def __repr__(self): diff --git a/tests/executors/test_executor_loader.py b/tests/executors/test_executor_loader.py index 68bc02a6300e..325345247a29 100644 --- a/tests/executors/test_executor_loader.py +++ b/tests/executors/test_executor_loader.py @@ -102,7 +102,7 @@ def test_should_support_custom_path(self): ), ], ), - # Core executors and custom module path executor and plugin + # Core executors and custom module path executor ( "CeleryExecutor, LocalExecutor, tests.executors.test_executor_loader.FakeExecutor", [ @@ -120,7 +120,7 @@ def test_should_support_custom_path(self): ), ], ), - # Core executors and custom module path executor and plugin with aliases + # Core executors and custom module path executor with aliases ( ( "CeleryExecutor, LocalExecutor, fake_exec:tests.executors.test_executor_loader.FakeExecutor" From 681c59a27c1e0414bf1843c569fad9d0ec407456 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Fri, 1 Nov 2024 23:33:10 +0100 Subject: [PATCH 004/137] Remove root warning in image used to build packages in CI (#43597) --- .../src/airflow_breeze/commands/release_management_commands.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py index e81220032387..f906ffaa4425 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py @@ -243,7 +243,7 @@ class VersionedFile(NamedTuple): AIRFLOW_BUILD_DOCKERFILE = f""" FROM python:{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}-slim-{ALLOWED_DEBIAN_VERSIONS[0]} RUN apt-get update && apt-get install -y --no-install-recommends git -RUN pip install pip=={AIRFLOW_PIP_VERSION} hatch=={HATCH_VERSION} pyyaml=={PYYAML_VERSION}\ +RUN pip install --root-user-action ignore pip=={AIRFLOW_PIP_VERSION} hatch=={HATCH_VERSION} pyyaml=={PYYAML_VERSION}\ gitpython=={GITPYTHON_VERSION} rich=={RICH_VERSION} pre-commit=={PRE_COMMIT_VERSION} COPY . /opt/airflow """ From eefddcbd6aee7c56ed9a82ee6049c87357abc72e Mon Sep 17 00:00:00 2001 From: Zach Liu Date: Fri, 1 Nov 2024 19:45:08 -0400 Subject: [PATCH 005/137] next_execution_date explained in case of manual triggers (#43535) aligned with comments at https://github.com/apache/airflow/blob/35087d7d10714130cc3e9e9730e34b07fc56938d/airflow/models/taskinstance.py#L1032-L1035 --- docs/apache-airflow/templates-ref.rst | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/apache-airflow/templates-ref.rst b/docs/apache-airflow/templates-ref.rst index 17780bc323c7..b29690c096ba 100644 --- a/docs/apache-airflow/templates-ref.rst +++ b/docs/apache-airflow/templates-ref.rst @@ -118,8 +118,10 @@ existing code to use other variables instead. 
Deprecated Variable Description ===================================== ========================================================================== ``{{ execution_date }}`` the execution date (logical date), same as ``logical_date`` -``{{ next_execution_date }}`` the logical date of the next scheduled run (if applicable); - you may be able to use ``data_interval_end`` instead +``{{ next_execution_date }}`` the logical date of the next scheduled run, + you may be able to use ``data_interval_end`` instead; for manually + triggered dagruns that aren't on a schedule, ``next_execution_date`` is + set to ``logical_date`` ``{{ next_ds }}`` the next execution date as ``YYYY-MM-DD`` if exists, else ``None`` ``{{ next_ds_nodash }}`` the next execution date as ``YYYYMMDD`` if exists, else ``None`` ``{{ prev_execution_date }}`` the logical date of the previous scheduled run (if applicable) From c1bd9c5c0141ec8b981e12232224ee1595cb83d1 Mon Sep 17 00:00:00 2001 From: "D. Ferruzzi" Date: Fri, 1 Nov 2024 17:57:04 -0700 Subject: [PATCH 006/137] Bedrock retired the Llama2 model, updated the test to use the closest replacement Llama3 model. (#43600) --- providers/tests/system/amazon/aws/example_bedrock.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/providers/tests/system/amazon/aws/example_bedrock.py b/providers/tests/system/amazon/aws/example_bedrock.py index 3caa9f3a3539..bc28be30174e 100644 --- a/providers/tests/system/amazon/aws/example_bedrock.py +++ b/providers/tests/system/amazon/aws/example_bedrock.py @@ -63,7 +63,7 @@ SKIP_PROVISION_THROUGHPUT = environ.get("SKIP_RESTRICTED_SYSTEM_TEST_TASKS", default=True) -LLAMA_SHORT_MODEL_ID = "meta.llama2-13b-chat-v1" +LLAMA_SHORT_MODEL_ID = "meta.llama3-8b-instruct-v1:0" TITAN_MODEL_ID = "amazon.titan-text-express-v1:0:8k" TITAN_SHORT_MODEL_ID = TITAN_MODEL_ID.split(":")[0] From 59ea748f740a89ec17ece00ca874e5ac56b7e28f Mon Sep 17 00:00:00 2001 From: Kalyan R Date: Sat, 2 Nov 2024 15:40:51 +0530 Subject: [PATCH 007/137] Add min version to ipykernel,scrapbook, pywinrm (#43603) * add min version to pywinrm, ipykernel * add min version to scrapbook --- generated/provider_dependencies.json | 6 +++--- .../src/airflow/providers/microsoft/azure/provider.yaml | 2 +- providers/src/airflow/providers/papermill/provider.yaml | 4 ++-- providers/tests/system/papermill/conftest.py | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json index ed1cd80edc9f..55d4d37c7ff1 100644 --- a/generated/provider_dependencies.json +++ b/generated/provider_dependencies.json @@ -841,7 +841,7 @@ "msgraph-core>=1.0.0" ], "devel-deps": [ - "pywinrm" + "pywinrm>=0.4" ], "plugins": [], "cross-providers-deps": [ @@ -1043,11 +1043,11 @@ "papermill": { "deps": [ "apache-airflow>=2.8.0", - "ipykernel", + "ipykernel>=6.29.4", "pandas>=1.5.3,<2.2;python_version<\"3.9\"", "pandas>=2.1.2,<2.2;python_version>=\"3.9\"", "papermill[all]>=2.6.0", - "scrapbook[all]" + "scrapbook[all]>=0.5.0" ], "devel-deps": [], "plugins": [], diff --git a/providers/src/airflow/providers/microsoft/azure/provider.yaml b/providers/src/airflow/providers/microsoft/azure/provider.yaml index eeb975d1e7e1..03a8cd2320e5 100644 --- a/providers/src/airflow/providers/microsoft/azure/provider.yaml +++ b/providers/src/airflow/providers/microsoft/azure/provider.yaml @@ -119,7 +119,7 @@ dependencies: - microsoft-kiota-abstractions<1.4.0 devel-dependencies: - - pywinrm + - pywinrm>=0.4 integrations: - integration-name: Microsoft Azure 
Batch diff --git a/providers/src/airflow/providers/papermill/provider.yaml b/providers/src/airflow/providers/papermill/provider.yaml index daed40998295..2118e4c2514d 100644 --- a/providers/src/airflow/providers/papermill/provider.yaml +++ b/providers/src/airflow/providers/papermill/provider.yaml @@ -54,8 +54,8 @@ versions: dependencies: - apache-airflow>=2.8.0 - papermill[all]>=2.6.0 - - scrapbook[all] - - ipykernel + - scrapbook[all]>=0.5.0 + - ipykernel>=6.29.4 - pandas>=2.1.2,<2.2;python_version>="3.9" - pandas>=1.5.3,<2.2;python_version<"3.9" diff --git a/providers/tests/system/papermill/conftest.py b/providers/tests/system/papermill/conftest.py index 1ccc1510a489..32759ed056e1 100644 --- a/providers/tests/system/papermill/conftest.py +++ b/providers/tests/system/papermill/conftest.py @@ -36,7 +36,7 @@ def remote_kernel(request): [ "python3", "-m", - "ipykernel", + "ipykernel>=6.29.4", '--Session.key=b""', f"--hb={JUPYTER_KERNEL_HB_PORT}", f"--shell={JUPYTER_KERNEL_SHELL_PORT}", From a8921ae57a53d16150f985305a7252222647f15c Mon Sep 17 00:00:00 2001 From: GPK Date: Sat, 2 Nov 2024 16:18:39 +0000 Subject: [PATCH 008/137] remove un used python_version param argument (#43609) --- dev/breeze/src/airflow_breeze/utils/run_tests.py | 8 ++------ dev/breeze/tests/test_pytest_args_for_test_types.py | 2 -- 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/dev/breeze/src/airflow_breeze/utils/run_tests.py b/dev/breeze/src/airflow_breeze/utils/run_tests.py index 5f0cd14938ff..6379eda9e759 100644 --- a/dev/breeze/src/airflow_breeze/utils/run_tests.py +++ b/dev/breeze/src/airflow_breeze/utils/run_tests.py @@ -309,9 +309,7 @@ def generate_args_for_pytest( ): result_log_file, warnings_file, coverage_file = test_paths(test_type, backend, helm_test_package) if skip_db_tests and parallel_test_types_list: - args = convert_parallel_types_to_folders( - parallel_test_types_list, skip_provider_tests, python_version=python_version - ) + args = convert_parallel_types_to_folders(parallel_test_types_list, skip_provider_tests) else: args = convert_test_type_to_pytest_args( test_type=test_type, @@ -394,9 +392,7 @@ def generate_args_for_pytest( return args -def convert_parallel_types_to_folders( - parallel_test_types_list: list[str], skip_provider_tests: bool, python_version: str -): +def convert_parallel_types_to_folders(parallel_test_types_list: list[str], skip_provider_tests: bool): args = [] for _test_type in parallel_test_types_list: args.extend( diff --git a/dev/breeze/tests/test_pytest_args_for_test_types.py b/dev/breeze/tests/test_pytest_args_for_test_types.py index e149db971d7e..94a229802ad3 100644 --- a/dev/breeze/tests/test_pytest_args_for_test_types.py +++ b/dev/breeze/tests/test_pytest_args_for_test_types.py @@ -18,7 +18,6 @@ import pytest -from airflow_breeze.global_constants import DEFAULT_PYTHON_MAJOR_MINOR_VERSION from airflow_breeze.utils.run_tests import convert_parallel_types_to_folders, convert_test_type_to_pytest_args @@ -347,7 +346,6 @@ def test_folders_for_parallel_test_types( convert_parallel_types_to_folders( parallel_test_types_list=parallel_test_types.split(" "), skip_provider_tests=skip_provider_tests, - python_version=DEFAULT_PYTHON_MAJOR_MINOR_VERSION, ) == folders ) From 17e51009a730ea5ea68b50d8c19cb9e5f04ee141 Mon Sep 17 00:00:00 2001 From: Karthikeyan Singaravelan Date: Sat, 2 Nov 2024 22:47:17 +0530 Subject: [PATCH 009/137] Check for empty list of integration tests before evaluation inside matrix that requires a value. 
(#43546) --- .github/workflows/integration-tests.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index e831350f5b18..530d0f9fc563 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -59,6 +59,7 @@ on: # yamllint disable-line rule:truthy jobs: tests-integration: timeout-minutes: 130 + if: inputs.testable-integrations != '[]' name: "Integration Tests: ${{ matrix.integration }}" runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} strategy: From 229c6a3e4673db03119c78828e546ebae272ef75 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sat, 2 Nov 2024 20:32:23 +0100 Subject: [PATCH 010/137] Explain how to use uv with airflow virtualenv and make it works (#43604) Since we are switching to ``uv`` as our main development tooling, we should explain how to use ``uv`` with airflow and explain some basic commands that should be used to have a workign uv-manaaged venv. This documentation explains some why's and initial hows with uv, also it fixes uv to work on macos with some default extras - such as devel, devel-tests and --all-extras, so that it works on a wider range of systems (includin MacOS). This includes making plyvel not installed on MacOS, because it's next to impossible to compile levelDB on a modern MacOS Operating system and it is anyway an optional component of google provider. Fixes: #43200 --- .gitignore | 3 + airflow/settings.py | 10 +- airflow/utils/dot_renderer.py | 4 +- contributing-docs/07_local_virtualenv.rst | 300 +++++++----------- generated/provider_dependencies.json | 2 +- hatch_build.py | 10 +- .../apache/hive/transfers/mysql_to_hive.py | 11 +- .../google/cloud/transfers/mysql_to_gcs.py | 11 +- .../airflow/providers/mysql/hooks/mysql.py | 30 +- .../src/airflow/providers/mysql/provider.yaml | 5 +- .../mysql/transfers/vertica_to_mysql.py | 10 +- providers/tests/mysql/operators/test_mysql.py | 10 +- 12 files changed, 210 insertions(+), 196 deletions(-) diff --git a/.gitignore b/.gitignore index a9c055041d98..84afbd474102 100644 --- a/.gitignore +++ b/.gitignore @@ -254,3 +254,6 @@ licenses/LICENSES-ui.txt # airflow-build-dockerfile and correconding ignore file airflow-build-dockerfile* + +# Temporary ignore uv.lock until we integrate it fully in our constraint preparation mechanism +/uv.lock diff --git a/airflow/settings.py b/airflow/settings.py index 57c382e2a1a1..89c77a2abfe3 100644 --- a/airflow/settings.py +++ b/airflow/settings.py @@ -617,7 +617,15 @@ def configure_adapters(): if SQL_ALCHEMY_CONN.startswith("mysql"): try: - import MySQLdb.converters + try: + import MySQLdb.converters + except ImportError: + raise RuntimeError( + "You do not have `mysqlclient` package installed. " + "Please install it with `pip install mysqlclient` and make sure you have system " + "mysql libraries installed, as well as well as `pkg-config` system package " + "installed in case you see compilation error during installation." + ) MySQLdb.converters.conversions[Pendulum] = MySQLdb.converters.DateTime2literal except ImportError: diff --git a/airflow/utils/dot_renderer.py b/airflow/utils/dot_renderer.py index ec0b836cbc06..877d16450d70 100644 --- a/airflow/utils/dot_renderer.py +++ b/airflow/utils/dot_renderer.py @@ -27,7 +27,9 @@ import graphviz except ImportError: warnings.warn( - "Could not import graphviz. Rendering graph to the graphical format will not be possible.", + "Could not import graphviz. 
Rendering graph to the graphical format will not be possible. \n" + "You might need to install the graphviz package and necessary system packages.\n" + "Run `pip install graphviz` to attempt to install it.", UserWarning, stacklevel=2, ) diff --git a/contributing-docs/07_local_virtualenv.rst b/contributing-docs/07_local_virtualenv.rst index 48dc8a2fe08a..19f41e920e38 100644 --- a/contributing-docs/07_local_virtualenv.rst +++ b/contributing-docs/07_local_virtualenv.rst @@ -28,11 +28,8 @@ That's why we recommend using local virtualenv for development and testing. **The outline for this document in GitHub is available at top-right corner button (with 3-dots and 3 lines).** -Installation in local virtualenv --------------------------------- - Required Software Packages -.......................... +-------------------------- Use system-level package managers like yum, apt-get for Linux, or Homebrew for macOS to install required software packages: @@ -42,8 +39,12 @@ Homebrew for macOS to install required software packages: * libxml * helm (only for helm chart tests) -Refer to the `Dockerfile.ci <../Dockerfile.ci>`__ for a comprehensive list -of required packages. +There are also sometimes other system level packages needed to install python packages - especially +those that are coming from providers. For example you might need to install ``pkgconf`` to be able to +install ``mysqlclient`` package for ``mysql`` provider . Or you might need to install ``graphviz`` to be able to install +``devel`` extra bundle. + +Please refer to the `Dockerfile.ci <../Dockerfile.ci>`__ for a comprehensive list of required packages. .. note:: @@ -61,26 +62,114 @@ of required packages. released wheel packages. -Installing Airflow -.................. +Creating and maintaining local virtualenv with uv +------------------------------------------------- + +As of November 2024 we are recommending to use ``uv`` for local virtualenv management for Airflow development. +The ``uv`` utility is a build frontend tool that is designed to manage python, virtualenvs and workspaces for development +and testing of Python projects. It is a modern tool that is designed to work with PEP 517/518 compliant projects +and it is much faster than "reference" ``pip`` tool. It has extensive support to not only create development +environment but also to manage python versions, development environments, workspaces and Python tools used +to develop Airflow (via ``uv tool`` command - such as ``pre-commit`` and others, you can also use ``uv tool`` +to install ``breeze`` - containerized development environment for Airflow that we use to reproduce the +CI environment locally and to run release-management and certain development tasks. + +You can read more about ``uv`` in `UV Getting started `_ but +below you will find a few typical steps to get you started with ``uv``. + +Installing uv +............. + +You can follow the `installation instructions `_ to install +``uv`` on your system. Once you have ``uv`` installed, you can do all the environment preparation tasks using +``uv`` commands. + +Installing Python versions +.......................... + +You can install Python versions using ``uv python install`` command. For example, to install Python 3.9.7, you can run: + +.. code:: bash + + uv python install 3.9.7 + +This is optional step - ``uv`` will automatically install the Python version you need when you create a virtualenv. + +Creating virtualenvs with uv +............................ + +.. 
code:: bash + + uv venv + +This will create a default venv in your project's ``.venv`` directory. You can also create a venv +with a specific Python version by running: + +.. code:: bash + + uv venv --python 3.9.7 -The simplest way to install Airflow in local virtualenv is to use ``pip``: +You can also create a venv with a different venv directory name by running: .. code:: bash - pip install -e ".[devel,]" # for example: pip install -e ".[devel,google,postgres]" + uv venv .my-venv + +However ``uv`` creation/re-creation of venvs is so fast that you can easily create and delete venvs as needed. +So usually you do not need to have more than one venv and recreate it as needed - for example when you +need to change the python version. + +Syncing project (including providers) with uv +............................................. + +In a project like airflow it's important to have a consistent set of dependencies across all developers. +You can use ``uv sync`` to install dependencies from ``pyproject.toml`` file. This will install all dependencies +from the ``pyproject.toml`` file in the current directory. + +.. code:: bash + + uv sync + +If you also need to install development and provider dependencies you can specify extras for that providers: + +.. code:: bash + + uv sync --extra devel --extra devel-tests --extra google + +This will synchronize all extras that you need for development and testing of Airflow and google provider +dependencies - including their runtime dependencies. + +.. code:: bash + + uv sync --all-extras + +This will synchronize all extras of airflow (this might require some system dependencies to be installed). + + +Creating and installing airflow with other build-frontends +---------------------------------------------------------- + +While ``uv`` uses ``workspace`` feature to synchronize both Airflow and Providers in a single sync +command, you can still use other frontend tools (such as ``pip``) to install Airflow and Providers +and to develop them without relying on ``sync`` and ``workspace`` features of ``uv``. Below chapters +describe how to do it with ``pip``. + +Installing Airflow with pip +........................... + +Since Airflow follows the standards define by the packaging community, we are not bound with +``uv`` as the only tool to manage virtualenvs - and you can use any other compliant frontends to install +airflow for development. The standard way of installing environment with dependencies necessary to +run tests is to use ``pip`` to install airflow dependencies: + +.. code:: bash + + pip install -e ".[devel,devel-tests,]" # for example: pip install -e ".[devel,devel-tests,google,postgres]" This will install Airflow in 'editable' mode - where sources of Airflow are taken directly from the source code rather than moved to the installation directory. You need to run this command in the virtualenv you want to install Airflow in - and you need to have the virtualenv activated. -While you can use any virtualenv manager, we recommend using `Hatch `__ -as your development environment front-end, and we already use Hatch backend ``hatchling`` for Airflow. - -Hatchling is automatically installed when you build Airflow but since airflow build system uses -``PEP`` compliant ``pyproject.toml`` file, you can use any front-end build system that supports -``PEP 517`` and ``PEP 518``. You can also use ``pip`` to install Airflow in editable mode. - Extras (optional dependencies) .............................. 
@@ -145,169 +234,6 @@ both runtime and development dependencies of the google provider. The second one installs providers source code in development mode, so that modifications to the code are automatically reflected in your installed virtualenv. -Using Hatch ------------ - -Airflow uses `hatch `_ as a build and development tool of choice. It is one of popular -build tools and environment managers for Python, maintained by the Python Packaging Authority. -It is an optional tool that is only really needed when you want to build packages from sources, but -it is also very convenient to manage your Python versions and virtualenvs. - -Airflow project contains some pre-defined virtualenv definitions in ``pyproject.toml`` that can be -easily used by hatch to create your local venvs. This is not necessary for you to develop and test -Airflow, but it is a convenient way to manage your local Python versions and virtualenvs. - -Installing Hatch -................ - -You can install hatch using various other ways (including Gui installers). - -Example using ``pipx``: - -.. code:: bash - - pipx install hatch - -We recommend using ``pipx`` as you can manage installed Python apps easily and later use it -to upgrade ``hatch`` easily as needed with: - -.. code:: bash - - pipx upgrade hatch - -Using Hatch to manage your Python versions -.......................................... - -You can also use hatch to install and manage airflow virtualenvs and development -environments. For example, you can install Python 3.10 with this command: - -.. code:: bash - - hatch python install 3.10 - -or install all Python versions that are used in Airflow: - -.. code:: bash - - hatch python install all - -Manage your virtualenvs with Hatch -.................................. - -Airflow has some pre-defined virtualenvs that you can use to develop and test airflow. -You can see the list of available envs with: - -.. code:: bash - - hatch env show - -This is what it shows currently: - -+-------------+---------+---------------------------------------------------------------+ -| Name | Type | Description | -+=============+=========+===============================================================+ -| default | virtual | Default environment with Python 3.9 for maximum compatibility | -+-------------+---------+---------------------------------------------------------------+ -| airflow-39 | virtual | Environment with Python 3.9. No devel installed. | -+-------------+---------+---------------------------------------------------------------+ -| airflow-310 | virtual | Environment with Python 3.10. No devel installed. | -+-------------+---------+---------------------------------------------------------------+ -| airflow-311 | virtual | Environment with Python 3.11. No devel installed | -+-------------+---------+---------------------------------------------------------------+ -| airflow-312 | virtual | Environment with Python 3.12. No devel installed | -+-------------+---------+---------------------------------------------------------------+ - -The default env (if you have not used one explicitly) is ``default`` and it is a Python 3.9 -virtualenv for maximum compatibility. You can install devel set of dependencies with it -by running: - -.. code:: bash - - pip install -e ".[devel]" - -After entering the environment. - -The other environments are just bare-bones Python virtualenvs with Airflow core requirements only, -without any extras installed and without any tools. 
They are much faster to create than the default -environment, and you can manually install either appropriate extras or directly tools that you need for -testing or development. - -.. code:: bash - - hatch env create - -You can create specific environment by using them in create command: - -.. code:: bash - - hatch env create airflow-310 - -You can install extras in the environment by running pip command: - -.. code:: bash - - hatch -e airflow-310 run -- pip install -e ".[devel,google]" - -And you can enter the environment with running a shell of your choice (for example zsh) where you -can run any commands - -.. code:: bash - - hatch -e airflow-310 shell - - -Once you are in the environment (indicated usually by updated prompt), you can just install -extra dependencies you need: - -.. code:: bash - - [~/airflow] [airflow-310] pip install -e ".[devel,google]" - - -You can also see where hatch created the virtualenvs and use it in your IDE or activate it manually: - -.. code:: bash - - hatch env find airflow-310 - -You will get path similar to: - -.. code:: - - /Users/jarek/Library/Application Support/hatch/env/virtual/apache-airflow/TReRdyYt/apache-airflow - -Then you will find ``python`` binary and ``activate`` script in the ``bin`` sub-folder of this directory and -you can configure your IDE to use this python virtualenv if you want to use that environment in your IDE. - -You can also set default environment name by HATCH_ENV environment variable. - -You can clean the env by running: - -.. code:: bash - - hatch env prune - -More information about hatch can be found in `Hatch: Environments `__ - -Using Hatch to build your packages -.................................. - -You can use hatch to build installable package from the airflow sources. Such package will -include all metadata that is configured in ``pyproject.toml`` and will be installable with pip. - -The packages will have pre-installed dependencies for providers that are always -installed when Airflow is installed from PyPI. By default both ``wheel`` and ``sdist`` packages are built. - -.. code:: bash - - hatch build - -You can also build only ``wheel`` or ``sdist`` packages: - -.. code:: bash - - hatch build -t wheel - hatch build -t sdist Local and Remote Debugging in IDE --------------------------------- @@ -388,11 +314,11 @@ run the command above and commit the changes to ``pyproject.toml``. Then running install the dependencies automatically when you create or switch to a development environment. -Installing recommended version of dependencies ----------------------------------------------- +Installing "golden" version of dependencies +------------------------------------------- Whatever virtualenv solution you use, when you want to make sure you are using the same -version of dependencies as in main, you can install recommended version of the dependencies by using +version of dependencies as in main, you can install recommended version of the dependencies by using pip: constraint-python.txt files as ``constraint`` file. This might be useful to avoid "works-for-me" syndrome, where you use different version of dependencies than the ones that are used in main, CI tests and by other contributors. @@ -405,6 +331,14 @@ all basic devel requirements and requirements of google provider as last success pip install -e ".[devel,google]" \ --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.9.txt" +Or with ``uv``: + +.. 
code:: bash + + uv pip install -e ".[devel,google]" \ + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.9.txt" + + Make sure to use latest main for such installation, those constraints are "development constraints" and they are refreshed several times a day to make sure they are up to date with the latest changes in the main branch. diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json index 55d4d37c7ff1..88d84a020a35 100644 --- a/generated/provider_dependencies.json +++ b/generated/provider_dependencies.json @@ -909,7 +909,7 @@ "apache-airflow-providers-common-sql>=1.17.0", "apache-airflow>=2.8.0", "mysql-connector-python>=8.0.29", - "mysqlclient>=1.4.0" + "mysqlclient>=1.4.0; sys_platform != 'darwin'" ], "devel-deps": [], "plugins": [], diff --git a/hatch_build.py b/hatch_build.py index a362d7a5fcad..00832672b081 100644 --- a/hatch_build.py +++ b/hatch_build.py @@ -90,7 +90,10 @@ "authlib>=1.0.0", ], "graphviz": [ - "graphviz>=0.12", + # The graphviz package creates friction when installing on MacOS as it needs graphviz system package to + # be installed, and it's really only used for very obscure features of Airflow, so we can skip it on MacOS + # Instead, if someone attempts to use it on MacOS, they will get explanatory error on how to install it + "graphviz>=0.12; sys_platform != 'darwin'", ], "kerberos": [ "pykerberos>=1.1.13", @@ -101,7 +104,10 @@ "python-ldap>=3.4.4", ], "leveldb": [ - "plyvel>=1.5.1", + # The plyvel package is a huge pain when installing on MacOS - especially when Apple releases new + # OS version. It's usually next to impossible to install it at least for a few months after the new + # MacOS version is released. We can skip it on MacOS as this is an optional feature anyway. + "plyvel>=1.5.1; sys_platform != 'darwin'", ], "otel": [ "opentelemetry-exporter-prometheus>=0.47b0", diff --git a/providers/src/airflow/providers/apache/hive/transfers/mysql_to_hive.py b/providers/src/airflow/providers/apache/hive/transfers/mysql_to_hive.py index 70cb7f4eee2b..5fca8ef0ccad 100644 --- a/providers/src/airflow/providers/apache/hive/transfers/mysql_to_hive.py +++ b/providers/src/airflow/providers/apache/hive/transfers/mysql_to_hive.py @@ -24,7 +24,16 @@ from tempfile import NamedTemporaryFile from typing import TYPE_CHECKING, Sequence -import MySQLdb +try: + import MySQLdb +except ImportError: + raise RuntimeError( + "You do not have `mysqlclient` package installed. " + "Please install it with `pip install mysqlclient` and make sure you have system " + "mysql libraries installed, as well as well as `pkg-config` system package " + "installed in case you see compilation error during installation." + ) + from airflow.models import BaseOperator from airflow.providers.apache.hive.hooks.hive import HiveCliHook diff --git a/providers/src/airflow/providers/google/cloud/transfers/mysql_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/mysql_to_gcs.py index 45145a69f713..b0eae584f7b4 100644 --- a/providers/src/airflow/providers/google/cloud/transfers/mysql_to_gcs.py +++ b/providers/src/airflow/providers/google/cloud/transfers/mysql_to_gcs.py @@ -23,7 +23,16 @@ from datetime import date, datetime, time, timedelta from decimal import Decimal -from MySQLdb.constants import FIELD_TYPE +try: + from MySQLdb.constants import FIELD_TYPE +except ImportError: + raise RuntimeError( + "You do not have `mysqlclient` package installed. 
" + "Please install it with `pip install mysqlclient` and make sure you have system " + "mysql libraries installed, as well as well as `pkg-config` system package " + "installed in case you see compilation error during installation." + ) + from airflow.providers.google.cloud.transfers.sql_to_gcs import BaseSQLToGCSOperator from airflow.providers.mysql.hooks.mysql import MySqlHook diff --git a/providers/src/airflow/providers/mysql/hooks/mysql.py b/providers/src/airflow/providers/mysql/hooks/mysql.py index 678c680c6706..5ed8a62d75f2 100644 --- a/providers/src/airflow/providers/mysql/hooks/mysql.py +++ b/providers/src/airflow/providers/mysql/hooks/mysql.py @@ -35,7 +35,15 @@ from mysql.connector.abstracts import MySQLConnectionAbstract except ModuleNotFoundError: logger.warning("The package 'mysql-connector-python' is not installed. Import skipped") - from MySQLdb.connections import Connection as MySQLdbConnection + try: + from MySQLdb.connections import Connection as MySQLdbConnection + except ImportError: + raise RuntimeError( + "You do not have `mysqlclient` package installed. " + "Please install it with `pip install mysqlclient` and make sure you have system " + "mysql libraries installed, as well as well as `pkg-config` system package " + "installed in case you see compilation error during installation." + ) MySQLConnectionTypes = Union["MySQLdbConnection", "MySQLConnectionAbstract"] @@ -125,7 +133,15 @@ def _get_conn_config_mysql_client(self, conn: Connection) -> dict: if conn_config["charset"].lower() in ("utf8", "utf-8"): conn_config["use_unicode"] = True if conn.extra_dejson.get("cursor", False): - import MySQLdb.cursors + try: + import MySQLdb.cursors + except ImportError: + raise RuntimeError( + "You do not have `mysqlclient` package installed. " + "Please install it with `pip install mysqlclient` and make sure you have system " + "mysql libraries installed, as well as well as `pkg-config` system package " + "installed in case you see compilation error during installation." + ) cursor_type = conn.extra_dejson.get("cursor", "").lower() # Dictionary mapping cursor types to their respective classes @@ -194,7 +210,15 @@ def get_conn(self) -> MySQLConnectionTypes: client_name = conn.extra_dejson.get("client", "mysqlclient") if client_name == "mysqlclient": - import MySQLdb + try: + import MySQLdb + except ImportError: + raise RuntimeError( + "You do not have `mysqlclient` package installed. " + "Please install it with `pip install mysqlclient` and make sure you have system " + "mysql libraries installed, as well as well as `pkg-config` system package " + "installed in case you see compilation error during installation." 
+ ) conn_config = self._get_conn_config_mysql_client(conn) return MySQLdb.connect(**conn_config) diff --git a/providers/src/airflow/providers/mysql/provider.yaml b/providers/src/airflow/providers/mysql/provider.yaml index 01cb345f216d..a1ab2229dfd4 100644 --- a/providers/src/airflow/providers/mysql/provider.yaml +++ b/providers/src/airflow/providers/mysql/provider.yaml @@ -70,7 +70,10 @@ versions: dependencies: - apache-airflow>=2.8.0 - apache-airflow-providers-common-sql>=1.17.0 - - mysqlclient>=1.4.0 + # The mysqlclient package creates friction when installing on MacOS as it needs pkg-config to + # Install and compile, and it's really only used by MySQL provider, so we can skip it on MacOS + # Instead, if someone attempts to use it on MacOS, they will get explanatory error on how to install it + - mysqlclient>=1.4.0; sys_platform != 'darwin' - mysql-connector-python>=8.0.29 additional-extras: diff --git a/providers/src/airflow/providers/mysql/transfers/vertica_to_mysql.py b/providers/src/airflow/providers/mysql/transfers/vertica_to_mysql.py index fd196315d790..ee821b38da4d 100644 --- a/providers/src/airflow/providers/mysql/transfers/vertica_to_mysql.py +++ b/providers/src/airflow/providers/mysql/transfers/vertica_to_mysql.py @@ -22,7 +22,15 @@ from tempfile import NamedTemporaryFile from typing import TYPE_CHECKING, Sequence -import MySQLdb +try: + import MySQLdb +except ImportError: + raise RuntimeError( + "You do not have `mysqlclient` package installed. " + "Please install it with `pip install mysqlclient` and make sure you have system " + "mysql libraries installed, as well as well as `pkg-config` system package " + "installed in case you see compilation error during installation." + ) from airflow.models import BaseOperator from airflow.providers.mysql.hooks.mysql import MySqlHook diff --git a/providers/tests/mysql/operators/test_mysql.py b/providers/tests/mysql/operators/test_mysql.py index 75f0aed2935f..fa6fa0167e48 100644 --- a/providers/tests/mysql/operators/test_mysql.py +++ b/providers/tests/mysql/operators/test_mysql.py @@ -97,7 +97,15 @@ def test_overwrite_schema(self, client): conn_id=MYSQL_DEFAULT, ) - from MySQLdb import OperationalError + try: + from MySQLdb import OperationalError + except ImportError: + raise RuntimeError( + "You do not have `mysqlclient` package installed. " + "Please install it with `pip install mysqlclient` and make sure you have system " + "mysql libraries installed, as well as well as `pkg-config` system package " + "installed in case you see compilation error during installation." + ) with pytest.raises(OperationalError, match="Unknown database 'foobar'"): op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) From ddc5670a8c6f2facb490d3f8de297fb7705d3887 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sat, 2 Nov 2024 22:06:41 +0100 Subject: [PATCH 011/137] mAKe Breeze installation and reinstallation support both uv and pipx (#43607) So far `breeze` fully supported only `pipx` installation. For example it would reinstall itself automatically with pipx if you attempted to run it from another workspace/checked out repository of Airflow, and it only provided instructions for pipx. 
With this PR: * the `uv tool` is preferred way to install breeze * the `pipx` is PSF-governance managed alternative * breeze will reinstall itself using uv if it has been installed with uv before when it is run from a different workspace or different airflow repo checked out in another folder Also documentation is updated to make `uv` the recommended tool and describing how to install it - with `pipx` provided as an alternative. Warning is printed in case pre-commit-uv is not installed with the pre-commit (pre-commit-uv significantly speeds up installation of the venvs by pre-commit). This warning also provides instructions how to install it. --- .github/actions/install-pre-commit/action.yml | 50 +++++++++++++++++++ .github/workflows/basic-tests.yml | 13 ++--- .github/workflows/static-checks-mypy-docs.yml | 20 ++++---- .../03_contributors_quick_start.rst | 2 +- dev/breeze/doc/01_installation.rst | 25 ++++++++-- .../commands/developer_commands.py | 4 +- .../commands/release_management_commands.py | 15 +++--- .../src/airflow_breeze/global_constants.py | 28 +++++++---- .../airflow_breeze/utils/kubernetes_utils.py | 5 +- .../src/airflow_breeze/utils/path_utils.py | 13 ++--- .../airflow_breeze/utils/python_versions.py | 7 +-- .../src/airflow_breeze/utils/reinstall.py | 13 ++++- .../src/airflow_breeze/utils/run_utils.py | 26 +++++++--- .../ci/pre_commit/common_precommit_utils.py | 7 ++- 14 files changed, 159 insertions(+), 69 deletions(-) create mode 100644 .github/actions/install-pre-commit/action.yml diff --git a/.github/actions/install-pre-commit/action.yml b/.github/actions/install-pre-commit/action.yml new file mode 100644 index 000000000000..02eea2c72291 --- /dev/null +++ b/.github/actions/install-pre-commit/action.yml @@ -0,0 +1,50 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +--- +name: 'Install pre-commit' +description: 'Installs pre-commit and related packages' +inputs: + python-version: + description: 'Python version to use' + default: 3.9 + uv-version: + description: 'uv version to use' + default: 0.4.29 + pre-commit-version: + description: 'pre-commit version to use' + default: 4.0.1 + pre-commit-uv-version: + description: 'pre-commit-uv version to use' + default: 4.1.4 +runs: + using: "composite" + steps: + - name: Install pre-commit, uv, and pre-commit-uv + shell: bash + run: > + pip install + pre-commit==${{inputs.pre-commit-version}} + uv==${{inputs.uv-version}} + pre-commit-uv==${{inputs.pre-commit-uv-version}} + - name: Cache pre-commit envs + uses: actions/cache@v4 + with: + path: ~/.cache/pre-commit + key: "pre-commit-${{inputs.python-version}}-${{ hashFiles('.pre-commit-config.yaml') }}" + restore-keys: | + pre-commit-${{inputs.python-version}}- diff --git a/.github/workflows/basic-tests.yml b/.github/workflows/basic-tests.yml index 5b6600e560c5..bf7e8ab7bf79 100644 --- a/.github/workflows/basic-tests.yml +++ b/.github/workflows/basic-tests.yml @@ -285,16 +285,11 @@ jobs: - name: "Install Breeze" uses: ./.github/actions/breeze id: breeze - - name: Cache pre-commit envs - uses: actions/cache@v4 + - name: "Install pre-commit" + uses: ./.github/actions/install-pre-commit + id: pre-commit with: - path: ~/.cache/pre-commit - # yamllint disable-line rule:line-length - key: "pre-commit-${{steps.breeze.outputs.host-python-version}}-${{ hashFiles('.pre-commit-config.yaml') }}" - restore-keys: "\ - pre-commit-${{steps.breeze.outputs.host-python-version}}-\ - ${{ hashFiles('.pre-commit-config.yaml') }}\n - pre-commit-${{steps.breeze.outputs.host-python-version}}-" + python-version: ${{steps.breeze.outputs.host-python-version}} - name: Fetch incoming commit ${{ github.sha }} with its parent uses: actions/checkout@v4 with: diff --git a/.github/workflows/static-checks-mypy-docs.yml b/.github/workflows/static-checks-mypy-docs.yml index b34ad2c36f45..be2c4f8e2864 100644 --- a/.github/workflows/static-checks-mypy-docs.yml +++ b/.github/workflows/static-checks-mypy-docs.yml @@ -126,14 +126,11 @@ jobs: - name: "Prepare breeze & CI image: ${{ inputs.default-python-version}}:${{ inputs.image-tag }}" uses: ./.github/actions/prepare_breeze_and_image id: breeze - - name: Cache pre-commit envs - uses: actions/cache@v4 + - name: "Install pre-commit" + uses: ./.github/actions/install-pre-commit + id: pre-commit with: - path: ~/.cache/pre-commit - # yamllint disable-line rule:line-length - key: "pre-commit-${{steps.breeze.outputs.host-python-version}}-${{ hashFiles('.pre-commit-config.yaml') }}" - restore-keys: | - pre-commit-${{steps.breeze.outputs.host-python-version}}- + python-version: ${{steps.breeze.outputs.host-python-version}} - name: "Static checks" run: breeze static-checks --all-files --show-diff-on-failure --color always --initialize-environment env: @@ -170,10 +167,13 @@ jobs: - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }}" uses: ./.github/actions/prepare_breeze_and_image id: breeze + - name: "Install pre-commit" + uses: ./.github/actions/install-pre-commit + id: pre-commit + with: + python-version: ${{steps.breeze.outputs.host-python-version}} - name: "MyPy checks for ${{ matrix.mypy-check }}" - run: | - pip install pre-commit - pre-commit run --color always --verbose --hook-stage manual ${{matrix.mypy-check}} --all-files + run: pre-commit run --color always --verbose --hook-stage manual 
${{matrix.mypy-check}} --all-files env: VERBOSE: "false" COLUMNS: "250" diff --git a/contributing-docs/03_contributors_quick_start.rst b/contributing-docs/03_contributors_quick_start.rst index a088e3cb0d4c..96f80220770c 100644 --- a/contributing-docs/03_contributors_quick_start.rst +++ b/contributing-docs/03_contributors_quick_start.rst @@ -476,7 +476,7 @@ You can still add uv support for pre-commit if you use pipx using the commands: pipx install pre-commit pipx inject - pipx inject pre-commit pre-commit-uv + pipx inject prepare_breeze_and_image Also, if you already use ``uvx`` instead of ``pipx``, use this command: diff --git a/dev/breeze/doc/01_installation.rst b/dev/breeze/doc/01_installation.rst index 1c7ad0ee6283..052dc3faca9f 100644 --- a/dev/breeze/doc/01_installation.rst +++ b/dev/breeze/doc/01_installation.rst @@ -151,13 +151,28 @@ Docker in WSL 2 If VS Code is installed on the Windows host system then in the WSL Linux Distro you can run ``code .`` in the root directory of you Airflow repo to launch VS Code. -The pipx tool --------------- +The uv tool +----------- + +We are recommending to use the ``uv`` tool to manage your virtual environments and generally as a swiss-knife +of your Python environment (it supports installing various versions of Python, creating virtual environments, +installing packages, managing workspaces and running development tools.). + +Installing ``uv`` is described in the `uv documentation `_. +We highly recommend using ``uv`` to manage your Python environments, as it is very comprehensive, +easy to use, it is faster than any of the other tools availables (way faster!) and has a lot of features +that make it easier to work with Python. + +Alternative: pipx tool +---------------------- -We are using ``pipx`` tool to install and manage Breeze. The ``pipx`` tool is created by the creators +However, we do not want to be entirely dependent on ``uv`` as it is a software governed by a VC-backed vendor, +so we always want to provide open-source governed alternatives for our tools. If you can't or do not want to +use ``uv``, we got you covered. Another too you can use to manage development tools (and ``breeze`` development +environment is Python-Software-Foundation managed ``pipx``. The ``pipx`` tool is created by the creators of ``pip`` from `Python Packaging Authority `_ -Note that ``pipx`` >= 1.4.1 is used. +Note that ``pipx`` >= 1.4.1 should be used. Install pipx @@ -172,7 +187,7 @@ environments. This can be done automatically by the following command (follow in pipx ensurepath -In Mac +In case ``pipx`` is not in your PATH, you can run it with Python module: .. 
code-block:: bash diff --git a/dev/breeze/src/airflow_breeze/commands/developer_commands.py b/dev/breeze/src/airflow_breeze/commands/developer_commands.py index 0f9cec28710e..b26230c2e163 100644 --- a/dev/breeze/src/airflow_breeze/commands/developer_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/developer_commands.py @@ -861,7 +861,7 @@ def static_checks( for attempt in range(1, 1 + max_initialization_attempts): get_console().print(f"[info]Attempt number {attempt} to install pre-commit environments") initialization_result = run_command( - [sys.executable, "-m", "pre_commit", "install", "--install-hooks"], + ["pre-commit", "install", "--install-hooks"], check=False, no_output_dump_on_exception=True, text=True, @@ -874,7 +874,7 @@ def static_checks( get_console().print("[error]Could not install pre-commit environments[/]") sys.exit(return_code) - command_to_execute = [sys.executable, "-m", "pre_commit", "run"] + command_to_execute = ["pre-commit", "run"] if not one_or_none_set([last_commit, commit_ref, only_my_changes, all_files]): get_console().print( "\n[error]You can only specify " diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py index f906ffaa4425..61a26e9993f8 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py @@ -232,13 +232,14 @@ class VersionedFile(NamedTuple): AIRFLOW_PIP_VERSION = "24.3.1" AIRFLOW_UV_VERSION = "0.4.29" AIRFLOW_USE_UV = False -WHEEL_VERSION = "0.36.2" -GITPYTHON_VERSION = "3.1.40" -RICH_VERSION = "13.7.0" -NODE_VERSION = "21.2.0" -PRE_COMMIT_VERSION = "3.5.0" -HATCH_VERSION = "1.9.1" -PYYAML_VERSION = "6.0.1" +# TODO: automate thsese as well +WHEEL_VERSION = "0.44.0" +GITPYTHON_VERSION = "3.1.43" +RICH_VERSION = "13.9.4" +NODE_VERSION = "22.2.0" +PRE_COMMIT_VERSION = "4.0.1" +HATCH_VERSION = "1.13.0" +PYYAML_VERSION = "6.0.2" AIRFLOW_BUILD_DOCKERFILE = f""" FROM python:{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}-slim-{ALLOWED_DEBIAN_VERSIONS[0]} diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py index c80a5a9355eb..a674b142b3c3 100644 --- a/dev/breeze/src/airflow_breeze/global_constants.py +++ b/dev/breeze/src/airflow_breeze/global_constants.py @@ -32,10 +32,15 @@ except ImportError: get_console().print( "\n[error]Breeze doesn't support Python version <=3.8\n\n" - "[warning]Use Python 3.9 and force reinstall breeze with pipx\n\n" - " pipx install --force -e ./dev/breeze\n" + "[warning]Use Python 3.9 and force reinstall breeze:" + "" + " either with uv: \n\n" + " uv tool install --force --reinstall --editable ./dev/breeze\n\n" + "" + " or with pipx\n\n" + " pipx install --force -e ./dev/breeze --python 3.9\n" "\nTo find out more, visit [info]https://github.com/apache/airflow/" - "blob/main/dev/breeze/doc/01_installation.rst#the-pipx-tool[/]\n" + "blob/main/dev/breeze/doc/01_installation.rst[/]\n" ) sys.exit(1) from pathlib import Path @@ -253,13 +258,16 @@ def all_helm_test_packages() -> list[str]: @cache def all_task_sdk_test_packages() -> list[str]: - return sorted( - [ - candidate.name - for candidate in (AIRFLOW_SOURCES_ROOT / "task_sdk" / "tests").iterdir() - if candidate.is_dir() and candidate.name != "__pycache__" - ] - ) + try: + return sorted( + [ + candidate.name + for candidate in (AIRFLOW_SOURCES_ROOT / "task_sdk" / "tests").iterdir() + if candidate.is_dir() and candidate.name != 
"__pycache__" + ] + ) + except FileNotFoundError: + return [] ALLOWED_TASK_SDK_TEST_PACKAGES = [ diff --git a/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py b/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py index 3aca9d51c130..b9bdc5302bdf 100644 --- a/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py @@ -391,10 +391,7 @@ def create_virtualenv(force_venv_setup: bool) -> RunCommandResult: "[info]You can uninstall breeze and install it again with earlier Python " "version. For example:[/]\n" ) - get_console().print("pipx reinstall --python PYTHON_PATH apache-airflow-breeze\n") - get_console().print( - f"[info]PYTHON_PATH - path to your Python binary(< {higher_python_version_tuple})[/]\n" - ) + get_console().print("[info]Then recreate your k8s virtualenv with:[/]\n") get_console().print("breeze k8s setup-env --force-venv-setup\n") sys.exit(1) diff --git a/dev/breeze/src/airflow_breeze/utils/path_utils.py b/dev/breeze/src/airflow_breeze/utils/path_utils.py index 0feba56356ba..cf04ecc27871 100644 --- a/dev/breeze/src/airflow_breeze/utils/path_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/path_utils.py @@ -167,8 +167,9 @@ def reinstall_if_setup_changed() -> bool: return False if "apache-airflow-breeze" in e.msg: print( - """Missing Package `apache-airflow-breeze`. - Use `pipx install -e ./dev/breeze` to install the package.""" + """Missing Package `apache-airflow-breeze`. Please install it.\n + Use `uv tool install -e ./dev/breeze or `pipx install -e ./dev/breeze` + to install the package.""" ) return False sources_hash = get_installation_sources_config_metadata_hash() @@ -224,10 +225,10 @@ def get_used_airflow_sources() -> Path: @cache def find_airflow_sources_root_to_operate_on() -> Path: """ - Find the root of airflow sources we operate on. Handle the case when Breeze is installed via `pipx` from - a different source tree, so it searches upwards of the current directory to find the right root of - airflow directory we are actually in. This **might** be different than the sources of Airflow Breeze - was installed from. + Find the root of airflow sources we operate on. Handle the case when Breeze is installed via + `pipx` or `uv tool` from a different source tree, so it searches upwards of the current directory + to find the right root of airflow directory we are actually in. This **might** be different + than the sources of Airflow Breeze was installed from. If not found, we operate on Airflow sources that we were installed it. This handles the case when we run Breeze from a "random" directory. 
diff --git a/dev/breeze/src/airflow_breeze/utils/python_versions.py b/dev/breeze/src/airflow_breeze/utils/python_versions.py index d144139b0681..4f5a7a00bb58 100644 --- a/dev/breeze/src/airflow_breeze/utils/python_versions.py +++ b/dev/breeze/src/airflow_breeze/utils/python_versions.py @@ -51,8 +51,9 @@ def check_python_version(release_provider_packages: bool = False): get_console().print( "[warning]Please reinstall Breeze using Python 3.9 - 3.11 environment because not all " "provider packages support Python 3.12 yet.[/]\n\n" - "For example:\n\n" - "pipx uninstall apache-airflow-breeze\n" - "pipx install --python $(which python3.9) -e ./dev/breeze --force\n" + "If you are using uv:\n\n" + " uv tool install --force --reinstall --python 3.9 -e ./dev/breeze\n\n" + "If you are using pipx:\n\n" + " pipx install --python $(which python3.9) --force -e ./dev/breeze\n" ) sys.exit(1) diff --git a/dev/breeze/src/airflow_breeze/utils/reinstall.py b/dev/breeze/src/airflow_breeze/utils/reinstall.py index de3da9285543..6165c8a30720 100644 --- a/dev/breeze/src/airflow_breeze/utils/reinstall.py +++ b/dev/breeze/src/airflow_breeze/utils/reinstall.py @@ -27,15 +27,24 @@ def reinstall_breeze(breeze_sources: Path, re_run: bool = True): """ - Reinstalls Breeze from specified sources. + Re-installs Breeze from specified sources. :param breeze_sources: Sources where to install Breeze from. :param re_run: whether to re-run the original command that breeze was run with. """ + # First check if `breeze` is installed with uv and if it is, reinstall it using uv + # If not - we assume pipx is used and we reinstall it using pipx # Note that we cannot use `pipx upgrade` here because we sometimes install # Breeze from different sources than originally installed (i.e. when we reinstall airflow # From the current directory. get_console().print(f"\n[info]Reinstalling Breeze from {breeze_sources}\n") - subprocess.check_call(["pipx", "install", "-e", str(breeze_sources), "--force"]) + result = subprocess.run(["uv", "tool", "list"], text=True, capture_output=True, check=False) + if result.returncode == 0: + if "apache-airflow-breeze" in result.stdout: + subprocess.check_call( + ["uv", "tool", "install", "--force", "--reinstall", "-e", breeze_sources.as_posix()] + ) + else: + subprocess.check_call(["pipx", "install", "-e", breeze_sources.as_posix(), "--force"]) if re_run: # Make sure we don't loop forever if the metadata hash hasn't been updated yet (else it is tricky to # run pre-commit checks via breeze!) diff --git a/dev/breeze/src/airflow_breeze/utils/run_utils.py b/dev/breeze/src/airflow_breeze/utils/run_utils.py index 2e828936aa64..8396c0016dea 100644 --- a/dev/breeze/src/airflow_breeze/utils/run_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/run_utils.py @@ -219,14 +219,14 @@ def assert_pre_commit_installed(): python_executable = sys.executable get_console().print(f"[info]Checking pre-commit installed for {python_executable}[/]") command_result = run_command( - [python_executable, "-m", "pre_commit", "--version"], + ["pre-commit", "--version"], capture_output=True, text=True, check=False, ) if command_result.returncode == 0: if command_result.stdout: - pre_commit_version = command_result.stdout.split(" ")[-1].strip() + pre_commit_version = command_result.stdout.split(" ")[1].strip() if Version(pre_commit_version) >= Version(min_pre_commit_version): get_console().print( f"\n[success]Package pre_commit is installed. 
" @@ -238,6 +238,20 @@ def assert_pre_commit_installed(): f"aat least {min_pre_commit_version} and is {pre_commit_version}.[/]\n\n" ) sys.exit(1) + if "pre-commit-uv" not in command_result.stdout: + get_console().print( + "\n[warning]You can significantly improve speed of installing your pre-commit envs " + "by installing `pre-commit-uv` with it.[/]\n" + ) + get_console().print( + "\n[warning]With uv you can install it with:[/]\n\n" + " uv tool install pre-commit --with pre-commit-uv --force-reinstall\n" + ) + get_console().print( + "\n[warning]With pipx you can install it with:[/]\n\n" + " pipx inject\n" + " pipx inject pre-commit pre-commit-uv\n" + ) else: get_console().print( "\n[warning]Could not determine version of pre-commit. You might need to update it![/]\n" @@ -459,9 +473,7 @@ def run_compile_www_assets( "[info]However, it requires you to have local yarn installation.\n" ) command_to_execute = [ - sys.executable, - "-m", - "pre_commit", + "pre-commit", "run", "--hook-stage", "manual", @@ -512,9 +524,7 @@ def run_compile_ui_assets( "[info]However, it requires you to have local pnpm installation.\n" ) command_to_execute = [ - sys.executable, - "-m", - "pre_commit", + "pre-commit", "run", "--hook-stage", "manual", diff --git a/scripts/ci/pre_commit/common_precommit_utils.py b/scripts/ci/pre_commit/common_precommit_utils.py index c3cb40ffeb51..b8d29410db4a 100644 --- a/scripts/ci/pre_commit/common_precommit_utils.py +++ b/scripts/ci/pre_commit/common_precommit_utils.py @@ -118,8 +118,11 @@ def initialize_breeze_precommit(name: str, file: str): if shutil.which("breeze") is None: console.print( "[red]The `breeze` command is not on path.[/]\n\n" - "[yellow]Please install breeze with `pipx install -e ./dev/breeze` from Airflow sources " - "and make sure you run `pipx ensurepath`[/]\n\n" + "[yellow]Please install breeze.\n" + "You can use uv with `uv tool install -e ./dev/breeze or " + "`pipx install -e ./dev/breeze`.\n" + "It will install breeze from Airflow sources " + "(make sure you run `pipx ensurepath` if you use pipx)[/]\n\n" "[bright_blue]You can also set SKIP_BREEZE_PRE_COMMITS env variable to non-empty " "value to skip all breeze tests." ) From 6fd7052f863ad9fc95ea4b82f8993fc5858d0dc3 Mon Sep 17 00:00:00 2001 From: Ash Berlin-Taylor Date: Sun, 3 Nov 2024 01:04:51 +0000 Subject: [PATCH 012/137] Move TaskInstance heartbeat directly on to TI row, not on Job row (#43599) This is part of the work for AIP-72 epic, but is done as a separate PR for ease of review. This PR by itself doesn't remove the LocalTaskJob row (that will happen in a future PR when the execution code is moved over to live in the TaskSDK) but this paves the way for it. The reason we are making this change is: - Having a separate row for tracking TI heartbeat is not really buying us much - With the addition of TaskInstanceHistory we don't need _another_ separate record of when/where TIs were run - It simplifies things (one less join in finding zombies) - Makes zombie tracking easier -- it is now just on the TI state, not the combination of TI and Job state. 
--- airflow/cli/cli_config.py | 2 - airflow/cli/commands/task_command.py | 1 - airflow/executors/debug_executor.py | 2 +- airflow/jobs/local_task_job_runner.py | 3 +- airflow/jobs/scheduler_job_runner.py | 18 +- ..._0_add_last_heartbeat_at_directly_to_ti.py | 60 ++++ airflow/models/taskinstance.py | 54 ++- airflow/models/taskinstancehistory.py | 1 - .../serialization/pydantic/taskinstance.py | 16 +- airflow/task/standard_task_runner.py | 23 +- airflow/utils/db.py | 2 +- airflow/www/views.py | 4 +- docs/apache-airflow/img/airflow_erd.sha256 | 2 +- docs/apache-airflow/img/airflow_erd.svg | 310 +++++++++--------- docs/apache-airflow/migrations-ref.rst | 4 +- .../pre_commit/check_ti_vs_tis_attributes.py | 2 + .../test_mapped_task_instance_endpoint.py | 1 - .../endpoints/test_task_instance_endpoint.py | 1 - .../routes/public/test_task_instances.py | 1 - tests/assets/test_manager.py | 1 + tests/cli/commands/test_task_command.py | 2 +- tests/executors/test_debug_executor.py | 2 +- tests/jobs/test_local_task_job.py | 18 +- tests/jobs/test_scheduler_job.py | 28 +- tests/models/test_taskinstance.py | 2 +- tests/www/views/test_views_tasks.py | 14 +- 26 files changed, 294 insertions(+), 280 deletions(-) create mode 100644 airflow/migrations/versions/0045_3_0_0_add_last_heartbeat_at_directly_to_ti.py diff --git a/airflow/cli/cli_config.py b/airflow/cli/cli_config.py index 15543023cbf6..06ac2f7bd817 100644 --- a/airflow/cli/cli_config.py +++ b/airflow/cli/cli_config.py @@ -580,7 +580,6 @@ def string_lower_type(val): ("--ship-dag",), help="Pickles (serializes) the DAG and ships it to the worker", action="store_true" ) ARG_PICKLE = Arg(("-p", "--pickle"), help="Serialized pickle object of the entire dag (used internally)") -ARG_JOB_ID = Arg(("-j", "--job-id"), help=argparse.SUPPRESS) ARG_CFG_PATH = Arg(("--cfg-path",), help="Path to config file to use instead of airflow.cfg") ARG_MAP_INDEX = Arg(("--map-index",), type=int, default=-1, help="Mapped task index") ARG_READ_FROM_DB = Arg(("--read-from-db",), help="Read dag from DB instead of dag file", action="store_true") @@ -1354,7 +1353,6 @@ class GroupCommand(NamedTuple): ARG_DEPENDS_ON_PAST, ARG_SHIP_DAG, ARG_PICKLE, - ARG_JOB_ID, ARG_INTERACTIVE, ARG_SHUT_DOWN_LOGGING, ARG_MAP_INDEX, diff --git a/airflow/cli/commands/task_command.py b/airflow/cli/commands/task_command.py index 23f6e1abbe5e..03d2737072f3 100644 --- a/airflow/cli/commands/task_command.py +++ b/airflow/cli/commands/task_command.py @@ -341,7 +341,6 @@ def _run_raw_task(args, ti: TaskInstance) -> None | TaskReturnCode: """Run the main task handling code.""" return ti._run_raw_task( mark_success=args.mark_success, - job_id=args.job_id, pool=args.pool, ) diff --git a/airflow/executors/debug_executor.py b/airflow/executors/debug_executor.py index 80fb673cab84..aead7e2b2c11 100644 --- a/airflow/executors/debug_executor.py +++ b/airflow/executors/debug_executor.py @@ -84,7 +84,7 @@ def _run_task(self, ti: TaskInstance) -> bool: key = ti.key try: params = self.tasks_params.pop(ti.key, {}) - ti.run(job_id=ti.job_id, **params) + ti.run(**params) self.success(key) return True except Exception as e: diff --git a/airflow/jobs/local_task_job_runner.py b/airflow/jobs/local_task_job_runner.py index a33005b0a52c..c900c88674e7 100644 --- a/airflow/jobs/local_task_job_runner.py +++ b/airflow/jobs/local_task_job_runner.py @@ -159,7 +159,6 @@ def sigusr2_debug_handler(signum, frame): wait_for_past_depends_before_skipping=self.wait_for_past_depends_before_skipping, ignore_task_deps=self.ignore_task_deps, 
ignore_ti_state=self.ignore_ti_state, - job_id=str(self.job.id), pool=self.pool, external_executor_id=self.external_executor_id, ): @@ -319,6 +318,8 @@ def heartbeat_callback(self, session: Session = NEW_SESSION) -> None: "Recorded pid %s does not match the current pid %s", recorded_pid, current_pid ) raise AirflowException("PID of job runner does not match") + ti.update_heartbeat() + elif self.task_runner.return_code() is None and hasattr(self.task_runner, "process"): self._overtime = (timezone.utcnow() - (ti.end_date or timezone.utcnow())).total_seconds() if ti.state == TaskInstanceState.SKIPPED: diff --git a/airflow/jobs/scheduler_job_runner.py b/airflow/jobs/scheduler_job_runner.py index b763011e5500..39e4e35087bc 100644 --- a/airflow/jobs/scheduler_job_runner.py +++ b/airflow/jobs/scheduler_job_runner.py @@ -30,7 +30,7 @@ from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, Collection, Iterable, Iterator -from sqlalchemy import and_, delete, exists, func, not_, or_, select, text, update +from sqlalchemy import and_, delete, exists, func, not_, select, text, update from sqlalchemy.exc import OperationalError from sqlalchemy.orm import lazyload, load_only, make_transient, selectinload from sqlalchemy.sql import expression @@ -777,7 +777,7 @@ def process_executor_events( "TaskInstance Finished: dag_id=%s, task_id=%s, run_id=%s, map_index=%s, " "run_start_date=%s, run_end_date=%s, " "run_duration=%s, state=%s, executor=%s, executor_state=%s, try_number=%s, max_tries=%s, " - "job_id=%s, pool=%s, queue=%s, priority_weight=%d, operator=%s, queued_dttm=%s, " + "pool=%s, queue=%s, priority_weight=%d, operator=%s, queued_dttm=%s, " "queued_by_job_id=%s, pid=%s" ) cls.logger().info( @@ -794,7 +794,6 @@ def process_executor_events( state, try_number, ti.max_tries, - ti.job_id, ti.pool, ti.queue, ti.priority_weight, @@ -821,7 +820,6 @@ def process_executor_events( span.set_attribute("operator", str(ti.operator)) span.set_attribute("try_number", ti.try_number) span.set_attribute("executor_state", state) - span.set_attribute("job_id", ti.job_id) span.set_attribute("pool", ti.pool) span.set_attribute("queue", ti.queue) span.set_attribute("priority_weight", ti.priority_weight) @@ -1977,22 +1975,20 @@ def _find_and_purge_zombies(self) -> None: self._purge_zombies(zombies, session=session) def _find_zombies(self, *, session: Session) -> list[tuple[TI, str, str]]: - from airflow.jobs.job import Job - self.log.debug("Finding 'running' jobs without a recent heartbeat") limit_dttm = timezone.utcnow() - timedelta(seconds=self._zombie_threshold_secs) zombies = session.execute( select(TI, DM.fileloc, DM.processor_subdir) .with_hint(TI, "USE INDEX (ti_state)", dialect_name="mysql") - .join(Job, TI.job_id == Job.id) .join(DM, TI.dag_id == DM.dag_id) - .where(TI.state == TaskInstanceState.RUNNING) - .where(or_(Job.state != JobState.RUNNING, Job.latest_heartbeat < limit_dttm)) - .where(Job.job_type == "LocalTaskJob") + .where( + TI.state.in_((TaskInstanceState.RUNNING, TaskInstanceState.RESTARTING)), + TI.last_heartbeat_at < limit_dttm, + ) .where(TI.queued_by_job_id == self.job.id) ).all() if zombies: - self.log.warning("Failing (%s) jobs without heartbeat after %s", len(zombies), limit_dttm) + self.log.warning("Failing %s TIs without heartbeat after %s", len(zombies), limit_dttm) return zombies def _purge_zombies(self, zombies: list[tuple[TI, str, str]], *, session: Session) -> None: diff --git a/airflow/migrations/versions/0045_3_0_0_add_last_heartbeat_at_directly_to_ti.py 
b/airflow/migrations/versions/0045_3_0_0_add_last_heartbeat_at_directly_to_ti.py new file mode 100644 index 000000000000..47e72de9dcb4 --- /dev/null +++ b/airflow/migrations/versions/0045_3_0_0_add_last_heartbeat_at_directly_to_ti.py @@ -0,0 +1,60 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Add last_heartbeat_at directly to TI. + +Revision ID: d8cd3297971e +Revises: 5f57a45b8433 +Create Date: 2024-11-01 12:14:59.927266 + +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op + +from airflow.migrations.db_types import TIMESTAMP + +# revision identifiers, used by Alembic. +revision = "d8cd3297971e" +down_revision = "5f57a45b8433" +branch_labels = None +depends_on = None +airflow_version = "3.0.0" + + +def upgrade(): + with op.batch_alter_table("task_instance", schema=None) as batch_op: + batch_op.add_column(sa.Column("last_heartbeat_at", TIMESTAMP(timezone=True), nullable=True)) + batch_op.drop_index("ti_job_id") + batch_op.create_index("ti_heartbeat", ["last_heartbeat_at"], unique=False) + batch_op.drop_column("job_id") + with op.batch_alter_table("task_instance_history", schema=None) as batch_op: + batch_op.drop_column("job_id") + + +def downgrade(): + with op.batch_alter_table("task_instance", schema=None) as batch_op: + batch_op.add_column(sa.Column("job_id", sa.INTEGER(), autoincrement=False, nullable=True)) + batch_op.drop_index("ti_heartbeat") + batch_op.create_index("ti_job_id", ["job_id"], unique=False) + batch_op.drop_column("last_heartbeat_at") + with op.batch_alter_table("task_instance_history", schema=None) as batch_op: + batch_op.add_column(sa.Column("job_id", sa.INTEGER(), autoincrement=False, nullable=True)) diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index bb07ba6d848a..e86c47778246 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -133,7 +133,7 @@ tuple_in_condition, with_row_locks, ) -from airflow.utils.state import DagRunState, JobState, State, TaskInstanceState +from airflow.utils.state import DagRunState, State, TaskInstanceState from airflow.utils.task_group import MappedTaskGroup from airflow.utils.task_instance_session import set_current_task_instance_session from airflow.utils.timeout import timeout @@ -221,11 +221,16 @@ def _add_log( ) +@internal_api_call +@provide_session +def _update_ti_heartbeat(id: str, when: datetime, session: Session = NEW_SESSION): + session.execute(update(TaskInstance).where(TaskInstance.id == id).values(last_heartbeat_at=when)) + + def _run_raw_task( ti: TaskInstance | TaskInstancePydantic, mark_success: bool = False, test_mode: bool = False, - job_id: str | None = None, pool: str | None = None, raise_on_defer: bool = False, session: Session | None = None, @@ -249,7 +254,6 @@ def 
_run_raw_task( ti.test_mode = test_mode ti.refresh_from_task(ti.task, pool_override=pool) ti.refresh_from_db(session=session) - ti.job_id = job_id ti.hostname = get_hostname() ti.pid = os.getpid() if not test_mode: @@ -451,7 +455,6 @@ def clear_task_instances( If set to False, DagRuns state will not be changed. :param dag: DAG object """ - job_ids = [] # Keys: dag_id -> run_id -> map_indexes -> try_numbers -> task_id task_id_by_key: dict[str, dict[str, dict[int, dict[int, set[str]]]]] = defaultdict( lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(set))) @@ -462,11 +465,9 @@ def clear_task_instances( for ti in tis: TaskInstanceHistory.record_ti(ti, session) if ti.state == TaskInstanceState.RUNNING: - if ti.job_id: - # If a task is cleared when running, set its state to RESTARTING so that - # the task is terminated and becomes eligible for retry. - ti.state = TaskInstanceState.RESTARTING - job_ids.append(ti.job_id) + # If a task is cleared when running, set its state to RESTARTING so that + # the task is terminated and becomes eligible for retry. + ti.state = TaskInstanceState.RESTARTING else: ti_dag = dag if dag and dag.dag_id == ti.dag_id else dag_bag.get_dag(ti.dag_id, session=session) task_id = ti.task_id @@ -522,11 +523,6 @@ def clear_task_instances( delete_qry = TR.__table__.delete().where(conditions) session.execute(delete_qry) - if job_ids: - from airflow.jobs.job import Job - - session.execute(update(Job).where(Job.id.in_(job_ids)).values(state=JobState.RESTARTING)) - if dag_run_state is not False and tis: from airflow.models.dagrun import DagRun # Avoid circular import @@ -806,7 +802,6 @@ def _set_ti_attrs(target, source, include_dag_run=False): target.max_tries = source.max_tries target.hostname = source.hostname target.unixname = source.unixname - target.job_id = source.job_id target.pool = source.pool target.pool_slots = source.pool_slots or 1 target.queue = source.queue @@ -815,6 +810,7 @@ def _set_ti_attrs(target, source, include_dag_run=False): target.custom_operator_name = source.custom_operator_name target.queued_dttm = source.queued_dttm target.queued_by_job_id = source.queued_by_job_id + target.last_heartbeat_at = source.last_heartbeat_at target.pid = source.pid target.executor = source.executor target.executor_config = source.executor_config @@ -1844,7 +1840,6 @@ class TaskInstance(Base, LoggingMixin): max_tries = Column(Integer, server_default=text("-1")) hostname = Column(String(1000)) unixname = Column(String(1000)) - job_id = Column(Integer) pool = Column(String(256), nullable=False) pool_slots = Column(Integer, default=1, nullable=False) queue = Column(String(256)) @@ -1853,6 +1848,8 @@ class TaskInstance(Base, LoggingMixin): custom_operator_name = Column(String(1000)) queued_dttm = Column(UtcDateTime) queued_by_job_id = Column(Integer) + + last_heartbeat_at = Column(UtcDateTime) pid = Column(Integer) executor = Column(String(1000)) executor_config = Column(ExecutorConfigType(pickler=dill)) @@ -1885,8 +1882,8 @@ class TaskInstance(Base, LoggingMixin): Index("ti_state", state), Index("ti_state_lkp", dag_id, task_id, run_id, state), Index("ti_pool", pool, state, priority_weight), - Index("ti_job_id", job_id), Index("ti_trigger_id", trigger_id), + Index("ti_heartbeat", last_heartbeat_at), PrimaryKeyConstraint("id", name="task_instance_pkey"), UniqueConstraint("dag_id", "task_id", "run_id", "map_index", name="task_instance_composite_key"), ForeignKeyConstraint( @@ -2035,7 +2032,6 @@ def _command_as_list( local: bool = False, pickle_id: int | None = None, 
raw: bool = False, - job_id: str | None = None, pool: str | None = None, cfg_path: str | None = None, ) -> list[str]: @@ -2074,7 +2070,6 @@ def _command_as_list( pickle_id=pickle_id, file_path=path, raw=raw, - job_id=job_id, pool=pool, cfg_path=cfg_path, map_index=ti.map_index, @@ -2091,7 +2086,6 @@ def command_as_list( local: bool = False, pickle_id: int | None = None, raw: bool = False, - job_id: str | None = None, pool: str | None = None, cfg_path: str | None = None, ) -> list[str]: @@ -2111,7 +2105,6 @@ def command_as_list( local=local, pickle_id=pickle_id, raw=raw, - job_id=job_id, pool=pool, cfg_path=cfg_path, ) @@ -2131,7 +2124,6 @@ def generate_command( pickle_id: int | None = None, file_path: PurePath | str | None = None, raw: bool = False, - job_id: str | None = None, pool: str | None = None, cfg_path: str | None = None, map_index: int = -1, @@ -2156,7 +2148,6 @@ def generate_command( associated with the pickled DAG :param file_path: path to the file containing the DAG definition :param raw: raw mode (needs more details) - :param job_id: job ID (needs more details) :param pool: the Airflow pool that the task should run in :param cfg_path: the Path to the configuration file :return: shell command that can be used to run the task instance @@ -2166,8 +2157,6 @@ def generate_command( cmd.extend(["--mark-success"]) if pickle_id: cmd.extend(["--pickle", str(pickle_id)]) - if job_id: - cmd.extend(["--job-id", str(job_id)]) if ignore_all_deps: cmd.extend(["--ignore-all-dependencies"]) if ignore_task_deps: @@ -2641,7 +2630,6 @@ def _check_and_change_state_before_execution( mark_success: bool = False, test_mode: bool = False, hostname: str = "", - job_id: str | None = None, pool: str | None = None, external_executor_id: str | None = None, session: Session = NEW_SESSION, @@ -2661,7 +2649,6 @@ def _check_and_change_state_before_execution( :param mark_success: Don't run the task, mark its state as success :param test_mode: Doesn't record success or failure in the DB :param hostname: The hostname of the worker running the task instance. 
- :param job_id: Job (LocalTaskJob / SchedulerJob) ID :param pool: specifies the pool to use to run the task instance :param external_executor_id: The identifier of the celery executor :param session: SQLAlchemy ORM Session @@ -2684,7 +2671,6 @@ def _check_and_change_state_before_execution( ti.refresh_from_task(task, pool_override=pool) ti.test_mode = test_mode ti.refresh_from_db(session=session, lock_for_update=True) - ti.job_id = job_id ti.hostname = hostname ti.pid = None @@ -2789,7 +2775,6 @@ def check_and_change_state_before_execution( ignore_ti_state: bool = False, mark_success: bool = False, test_mode: bool = False, - job_id: str | None = None, pool: str | None = None, external_executor_id: str | None = None, session: Session = NEW_SESSION, @@ -2805,7 +2790,6 @@ def check_and_change_state_before_execution( mark_success=mark_success, test_mode=test_mode, hostname=get_hostname(), - job_id=job_id, pool=pool, external_executor_id=external_executor_id, session=session, @@ -2876,7 +2860,6 @@ def _run_raw_task( self, mark_success: bool = False, test_mode: bool = False, - job_id: str | None = None, pool: str | None = None, raise_on_defer: bool = False, session: Session = NEW_SESSION, @@ -2901,7 +2884,6 @@ def _run_raw_task( ti=self, mark_success=mark_success, test_mode=test_mode, - job_id=job_id, pool=pool, raise_on_defer=raise_on_defer, session=session, @@ -3071,6 +3053,11 @@ def _execute_task(self, context: Context, task_orig: Operator): """ return _execute_task(self, context, task_orig) + def update_heartbeat(self): + cm = nullcontext() if InternalApiConfig.get_use_internal_api() else create_session() + with cm as session_or_null: + _update_ti_heartbeat(self.id, timezone.utcnow(), session_or_null) + @provide_session def defer_task(self, exception: TaskDeferred | None, session: Session = NEW_SESSION) -> None: """ @@ -3101,7 +3088,6 @@ def run( ignore_ti_state: bool = False, mark_success: bool = False, test_mode: bool = False, - job_id: str | None = None, pool: str | None = None, session: Session = NEW_SESSION, raise_on_defer: bool = False, @@ -3116,7 +3102,6 @@ def run( ignore_ti_state=ignore_ti_state, mark_success=mark_success, test_mode=test_mode, - job_id=job_id, pool=pool, session=session, ) @@ -3126,7 +3111,6 @@ def run( self._run_raw_task( mark_success=mark_success, test_mode=test_mode, - job_id=job_id, pool=pool, session=session, raise_on_defer=raise_on_defer, diff --git a/airflow/models/taskinstancehistory.py b/airflow/models/taskinstancehistory.py index ccdca700af6e..8c77daf92579 100644 --- a/airflow/models/taskinstancehistory.py +++ b/airflow/models/taskinstancehistory.py @@ -70,7 +70,6 @@ class TaskInstanceHistory(Base): max_tries = Column(Integer, server_default=text("-1")) hostname = Column(String(1000)) unixname = Column(String(1000)) - job_id = Column(Integer) pool = Column(String(256), nullable=False) pool_slots = Column(Integer, default=1, nullable=False) queue = Column(String(256)) diff --git a/airflow/serialization/pydantic/taskinstance.py b/airflow/serialization/pydantic/taskinstance.py index caf44bea4c67..bf121353ca80 100644 --- a/airflow/serialization/pydantic/taskinstance.py +++ b/airflow/serialization/pydantic/taskinstance.py @@ -40,6 +40,7 @@ ) from airflow.serialization.pydantic.dag import DagModelPydantic from airflow.serialization.pydantic.dag_run import DagRunPydantic +from airflow.utils import timezone from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.net import get_hostname from airflow.utils.xcom import XCOM_RETURN_KEY @@ -83,6 
+84,7 @@ def validated_operator(x: dict[str, Any] | Operator, _info: ValidationInfo) -> A class TaskInstancePydantic(BaseModelPydantic, LoggingMixin): """Serializable representation of the TaskInstance ORM SqlAlchemyModel used by internal API.""" + id: str task_id: str dag_id: str run_id: str @@ -96,7 +98,6 @@ class TaskInstancePydantic(BaseModelPydantic, LoggingMixin): max_tries: int hostname: str unixname: str - job_id: Optional[int] pool: str pool_slots: int queue: str @@ -105,6 +106,7 @@ class TaskInstancePydantic(BaseModelPydantic, LoggingMixin): custom_operator_name: Optional[str] queued_dttm: Optional[datetime] queued_by_job_id: Optional[int] + last_heartbeat_at: Optional[datetime] = None pid: Optional[int] executor: Optional[str] executor_config: Any @@ -138,7 +140,6 @@ def _run_raw_task( self, mark_success: bool = False, test_mode: bool = False, - job_id: str | None = None, pool: str | None = None, raise_on_defer: bool = False, session: Session | None = None, @@ -147,7 +148,6 @@ def _run_raw_task( ti=self, mark_success=mark_success, test_mode=test_mode, - job_id=job_id, pool=pool, raise_on_defer=raise_on_defer, session=session, @@ -252,6 +252,12 @@ def refresh_from_db(self, session: Session | None = None, lock_for_update: bool _refresh_from_db(task_instance=self, session=session, lock_for_update=lock_for_update) + def update_heartbeat(self): + """Update the recorded heartbeat for this task to "now".""" + from airflow.models.taskinstance import _update_ti_heartbeat + + return _update_ti_heartbeat(self.id, timezone.utcnow()) + def set_duration(self) -> None: """Set task instance duration.""" from airflow.models.taskinstance import _set_duration @@ -441,7 +447,6 @@ def check_and_change_state_before_execution( ignore_ti_state: bool = False, mark_success: bool = False, test_mode: bool = False, - job_id: str | None = None, pool: str | None = None, external_executor_id: str | None = None, session: Session | None = None, @@ -457,7 +462,6 @@ def check_and_change_state_before_execution( mark_success=mark_success, test_mode=test_mode, hostname=get_hostname(), - job_id=job_id, pool=pool, external_executor_id=external_executor_id, session=session, @@ -484,7 +488,6 @@ def command_as_list( local: bool = False, pickle_id: int | None = None, raw: bool = False, - job_id: str | None = None, pool: str | None = None, cfg_path: str | None = None, ) -> list[str]: @@ -504,7 +507,6 @@ def command_as_list( local=local, pickle_id=pickle_id, raw=raw, - job_id=job_id, pool=pool, cfg_path=cfg_path, ) diff --git a/airflow/task/standard_task_runner.py b/airflow/task/standard_task_runner.py index d7f75f40e17d..a5641002c961 100644 --- a/airflow/task/standard_task_runner.py +++ b/airflow/task/standard_task_runner.py @@ -101,7 +101,6 @@ def __init__(self, job_runner: LocalTaskJobRunner): raw=True, pickle_id=self.job_runner.pickle_id, mark_success=self.job_runner.mark_success, - job_id=self.job_runner.job.id, pool=self.job_runner.pool, cfg_path=cfg_path, ) @@ -159,15 +158,10 @@ def _start_by_fork(self): # [1:] - remove "airflow" from the start of the command args = parser.parse_args(self._command[1:]) - # We prefer the job_id passed on the command-line because at this time, the - # task instance may not have been updated. 
- job_id = getattr(args, "job_id", self._task_instance.job_id) self.log.info("Running: %s", self._command) - self.log.info("Job %s: Subtask %s", job_id, self._task_instance.task_id) + self.log.info("Subtask %s", self._task_instance.task_id) proc_title = "airflow task runner: {0.dag_id} {0.task_id} {0.execution_date_or_run_id}" - if job_id is not None: - proc_title += " {0.job_id}" setproctitle(proc_title.format(args)) return_code = 0 try: @@ -179,15 +173,11 @@ def _start_by_fork(self): return_code = 0 if isinstance(ret, TaskReturnCode): return_code = ret.value - except Exception as exc: + except Exception: return_code = 1 self.log.exception( - "Failed to execute job %s for task %s (%s; %r)", - job_id, - self._task_instance.task_id, - exc, - os.getpid(), + "Failed to execute task_id=%s pid=%r", self._task_instance.task_id, os.getpid() ) except SystemExit as sys_ex: # Someone called sys.exit() in the fork - mistakenly. You should not run sys.exit() in @@ -250,10 +240,10 @@ def terminate(self): if self._rc == -signal.SIGKILL: self.log.error( ( - "Job %s was killed before it finished (likely due to running out of memory)", + "TI %s was killed before it finished (likely due to running out of memory)", "For more information, see https://airflow.apache.org/docs/apache-airflow/stable/troubleshooting.html#LocalTaskJob-killed", ), - self._task_instance.job_id, + self._task_instance.id, ) def get_process_pid(self) -> int: @@ -286,8 +276,7 @@ def _read_task_logs(self, stream): if not line: break self.log.info( - "Job %s: Subtask %s %s", - self._task_instance.job_id, + "Task %s %s", self._task_instance.task_id, line.rstrip("\n"), ) diff --git a/airflow/utils/db.py b/airflow/utils/db.py index 2c82f5f1948b..dd3e8c5d2002 100644 --- a/airflow/utils/db.py +++ b/airflow/utils/db.py @@ -97,7 +97,7 @@ class MappedClassProtocol(Protocol): "2.9.2": "686269002441", "2.10.0": "22ed7efa9da2", "2.10.3": "5f2621c13b39", - "3.0.0": "5f57a45b8433", + "3.0.0": "d8cd3297971e", } diff --git a/airflow/www/views.py b/airflow/www/views.py index d50d7bb2e78e..e287c027a894 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -5121,7 +5121,6 @@ class TaskInstanceModelView(AirflowModelView): "end_date", "duration", "note", - "job_id", "hostname", "unixname", "priority_weight", @@ -5146,7 +5145,6 @@ class TaskInstanceModelView(AirflowModelView): "end_date", "duration", # "note", # TODO: Maybe figure out how to re-enable this. 
- "job_id", "hostname", "unixname", "priority_weight", @@ -5192,7 +5190,7 @@ class TaskInstanceModelView(AirflowModelView): edit_form = TaskInstanceEditForm - base_order = ("job_id", "asc") + base_order = ("queued_dttm", "asc") base_filters = [["dag_id", DagFilter, list]] diff --git a/docs/apache-airflow/img/airflow_erd.sha256 b/docs/apache-airflow/img/airflow_erd.sha256 index f278eee7d05e..8adffd106eae 100644 --- a/docs/apache-airflow/img/airflow_erd.sha256 +++ b/docs/apache-airflow/img/airflow_erd.sha256 @@ -1 +1 @@ -9b9dcf915eff051a5cd77176a78bdcca3703b227373efe83fd0a1d4d05623c28 \ No newline at end of file +1d781ee92cc59e7647d7f72ddc542b7f17e03fc8b822950db74415c38279d40f \ No newline at end of file diff --git a/docs/apache-airflow/img/airflow_erd.svg b/docs/apache-airflow/img/airflow_erd.svg index 177c5a60f14e..1b0d5b346c95 100644 --- a/docs/apache-airflow/img/airflow_erd.svg +++ b/docs/apache-airflow/img/airflow_erd.svg @@ -1144,9 +1144,9 @@ [VARCHAR(1000)] -job_id - - [INTEGER] +last_heartbeat_at + + [TIMESTAMP] map_index @@ -1708,176 +1708,172 @@ task_instance_history - -task_instance_history - -id - - [INTEGER] - NOT NULL - -custom_operator_name - - [VARCHAR(1000)] - -dag_id - - [VARCHAR(250)] - NOT NULL - -duration - - [DOUBLE_PRECISION] - -end_date - - [TIMESTAMP] - -executor - - [VARCHAR(1000)] - -executor_config - - [BYTEA] - -external_executor_id - - [VARCHAR(250)] - -hostname - - [VARCHAR(1000)] - -job_id - - [INTEGER] - -map_index - - [INTEGER] - NOT NULL - -max_tries - - [INTEGER] - -next_kwargs - - [JSON] - -next_method - - [VARCHAR(1000)] - -operator - - [VARCHAR(1000)] - -pid - - [INTEGER] - -pool - - [VARCHAR(256)] - NOT NULL - -pool_slots - - [INTEGER] - NOT NULL - -priority_weight - - [INTEGER] - -queue - - [VARCHAR(256)] - -queued_by_job_id - - [INTEGER] - -queued_dttm - - [TIMESTAMP] - -rendered_map_index - - [VARCHAR(250)] - -run_id - - [VARCHAR(250)] - NOT NULL - -start_date - - [TIMESTAMP] - -state - - [VARCHAR(20)] - -task_display_name - - [VARCHAR(2000)] - -task_id - - [VARCHAR(250)] - NOT NULL - -trigger_id - - [INTEGER] - -trigger_timeout - - [TIMESTAMP] - -try_number - - [INTEGER] - NOT NULL - -unixname - - [VARCHAR(1000)] - -updated_at - - [TIMESTAMP] + +task_instance_history + +id + + [INTEGER] + NOT NULL + +custom_operator_name + + [VARCHAR(1000)] + +dag_id + + [VARCHAR(250)] + NOT NULL + +duration + + [DOUBLE_PRECISION] + +end_date + + [TIMESTAMP] + +executor + + [VARCHAR(1000)] + +executor_config + + [BYTEA] + +external_executor_id + + [VARCHAR(250)] + +hostname + + [VARCHAR(1000)] + +map_index + + [INTEGER] + NOT NULL + +max_tries + + [INTEGER] + +next_kwargs + + [JSON] + +next_method + + [VARCHAR(1000)] + +operator + + [VARCHAR(1000)] + +pid + + [INTEGER] + +pool + + [VARCHAR(256)] + NOT NULL + +pool_slots + + [INTEGER] + NOT NULL + +priority_weight + + [INTEGER] + +queue + + [VARCHAR(256)] + +queued_by_job_id + + [INTEGER] + +queued_dttm + + [TIMESTAMP] + +rendered_map_index + + [VARCHAR(250)] + +run_id + + [VARCHAR(250)] + NOT NULL + +start_date + + [TIMESTAMP] + +state + + [VARCHAR(20)] + +task_display_name + + [VARCHAR(2000)] + +task_id + + [VARCHAR(250)] + NOT NULL + +trigger_id + + [INTEGER] + +trigger_timeout + + [TIMESTAMP] + +try_number + + [INTEGER] + NOT NULL + +unixname + + [VARCHAR(1000)] + +updated_at + + [TIMESTAMP] task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 
+ +0..N +1 diff --git a/docs/apache-airflow/migrations-ref.rst b/docs/apache-airflow/migrations-ref.rst index f3441ceaf72b..f133a67e08ef 100644 --- a/docs/apache-airflow/migrations-ref.rst +++ b/docs/apache-airflow/migrations-ref.rst @@ -39,7 +39,9 @@ Here's the list of all the Database Migrations that are executed via when you ru +-------------------------+------------------+-------------------+--------------------------------------------------------------+ | Revision ID | Revises ID | Airflow Version | Description | +=========================+==================+===================+==============================================================+ -| ``5f57a45b8433`` (head) | ``486ac7936b78`` | ``3.0.0`` | Drop task_fail table. | +| ``d8cd3297971e`` (head) | ``5f57a45b8433`` | ``3.0.0`` | Add last_heartbeat_at directly to TI. | ++-------------------------+------------------+-------------------+--------------------------------------------------------------+ +| ``5f57a45b8433`` | ``486ac7936b78`` | ``3.0.0`` | Drop task_fail table. | +-------------------------+------------------+-------------------+--------------------------------------------------------------+ | ``486ac7936b78`` | ``d59cbbef95eb`` | ``3.0.0`` | remove scheduler_lock column. | +-------------------------+------------------+-------------------+--------------------------------------------------------------+ diff --git a/scripts/ci/pre_commit/check_ti_vs_tis_attributes.py b/scripts/ci/pre_commit/check_ti_vs_tis_attributes.py index 1dfc51a0a040..16c1df48a9e8 100755 --- a/scripts/ci/pre_commit/check_ti_vs_tis_attributes.py +++ b/scripts/ci/pre_commit/check_ti_vs_tis_attributes.py @@ -52,6 +52,8 @@ def compare_attributes(path1, path2): "triggerer_job", "note", "rendered_task_instance_fields", + # Storing last heartbeat for historic TIs is not interesting/useful + "last_heartbeat_at", } # exclude attrs not necessary to be in TaskInstanceHistory if not diff: return diff --git a/tests/api_connexion/endpoints/test_mapped_task_instance_endpoint.py b/tests/api_connexion/endpoints/test_mapped_task_instance_endpoint.py index 237ef5910c78..68ecd1e83898 100644 --- a/tests/api_connexion/endpoints/test_mapped_task_instance_endpoint.py +++ b/tests/api_connexion/endpoints/test_mapped_task_instance_endpoint.py @@ -77,7 +77,6 @@ def setup_attrs(self, configured_app) -> None: "duration": 10000, "pool": "default_pool", "queue": "default_queue", - "job_id": 0, } self.app = configured_app self.client = self.app.test_client() # type:ignore diff --git a/tests/api_connexion/endpoints/test_task_instance_endpoint.py b/tests/api_connexion/endpoints/test_task_instance_endpoint.py index bc8836981d42..e1fa6d13b748 100644 --- a/tests/api_connexion/endpoints/test_task_instance_endpoint.py +++ b/tests/api_connexion/endpoints/test_task_instance_endpoint.py @@ -81,7 +81,6 @@ def setup_attrs(self, configured_app, dagbag) -> None: "duration": 10000, "pool": "default_pool", "queue": "default_queue", - "job_id": 0, } self.app = configured_app self.client = self.app.test_client() # type:ignore diff --git a/tests/api_fastapi/core_api/routes/public/test_task_instances.py b/tests/api_fastapi/core_api/routes/public/test_task_instances.py index fa9cc0b161d0..717f17ca278a 100644 --- a/tests/api_fastapi/core_api/routes/public/test_task_instances.py +++ b/tests/api_fastapi/core_api/routes/public/test_task_instances.py @@ -68,7 +68,6 @@ def setup_attrs(self, session) -> None: "duration": 10000, "pool": "default_pool", "queue": "default_queue", - "job_id": 0, } clear_db_runs() 
clear_rendered_ti_fields() diff --git a/tests/assets/test_manager.py b/tests/assets/test_manager.py index eb12f281606e..3310502a97ba 100644 --- a/tests/assets/test_manager.py +++ b/tests/assets/test_manager.py @@ -59,6 +59,7 @@ def clear_assets(): @pytest.fixture def mock_task_instance(): return TaskInstancePydantic( + id="1", task_id="5", dag_id="7", run_id="11", diff --git a/tests/cli/commands/test_task_command.py b/tests/cli/commands/test_task_command.py index 9b605e818d8f..5a4e0b279242 100644 --- a/tests/cli/commands/test_task_command.py +++ b/tests/cli/commands/test_task_command.py @@ -932,7 +932,7 @@ def test_logging_with_run_task_subprocess(self, session): print(logs) # In case of a test failures this line would show detailed log logs_list = logs.splitlines() - assert f"Subtask {self.task_id}" in logs + assert f"Task {self.task_id}" in logs assert "standard_task_runner.py" in logs self.assert_log_line("Log from DAG Logger", logs_list) self.assert_log_line("Log from TI Logger", logs_list) diff --git a/tests/executors/test_debug_executor.py b/tests/executors/test_debug_executor.py index 20ee821842c8..a8ad66795767 100644 --- a/tests/executors/test_debug_executor.py +++ b/tests/executors/test_debug_executor.py @@ -50,7 +50,7 @@ def test_run_task(self, task_instance_mock): succeeded = executor._run_task(task_instance_mock) assert succeeded - task_instance_mock.run.assert_called_once_with(job_id=job_id) + task_instance_mock.run.assert_called() def test_queue_task_instance(self): key = "ti_key" diff --git a/tests/jobs/test_local_task_job.py b/tests/jobs/test_local_task_job.py index 84a7465a8236..7ee037478833 100644 --- a/tests/jobs/test_local_task_job.py +++ b/tests/jobs/test_local_task_job.py @@ -131,17 +131,21 @@ def test_localtaskjob_essential_attr(self, dag_maker): assert all(check_result_2) @pytest.mark.skip_if_database_isolation_mode # Does not work in db isolation mode - def test_localtaskjob_heartbeat(self, dag_maker): + def test_localtaskjob_heartbeat(self, dag_maker, time_machine): session = settings.Session() with dag_maker("test_localtaskjob_heartbeat"): op1 = EmptyOperator(task_id="op1") + time_machine.move_to(DEFAULT_DATE, tick=False) + dr = dag_maker.create_dagrun() ti = dr.get_task_instance(task_id=op1.task_id, session=session) ti.state = State.RUNNING ti.hostname = "blablabla" session.commit() + assert ti.last_heartbeat_at is None, "Pre-conditioncheck" + job1 = Job(dag_id=ti.dag_id, executor=SequentialExecutor()) job_runner = LocalTaskJobRunner(job=job1, task_instance=ti, ignore_ti_state=True) ti.task = op1 @@ -149,9 +153,12 @@ def test_localtaskjob_heartbeat(self, dag_maker): job1.task_runner = StandardTaskRunner(job_runner) job1.task_runner.process = mock.Mock() job_runner.task_runner = job1.task_runner - with pytest.raises(AirflowException): + with pytest.raises(AirflowException, match="Hostname .* does not match"): job_runner.heartbeat_callback() + ti = session.get(TaskInstance, (ti.id,)) + assert ti.last_heartbeat_at is None, "Should still be none" + job1.task_runner.process.pid = 1 ti.state = State.RUNNING ti.hostname = get_hostname() @@ -164,19 +171,22 @@ def test_localtaskjob_heartbeat(self, dag_maker): job_runner.heartbeat_callback(session=None) job1.task_runner.process.pid = 2 - with pytest.raises(AirflowException): + with pytest.raises(AirflowException, match="PID .* does not match"): job_runner.heartbeat_callback() # Now, set the ti.pid to None and test that no error # is raised. 
ti.pid = None - session.merge(ti) + ti = session.merge(ti) session.commit() assert ti.pid != job1.task_runner.process.pid assert not ti.run_as_user assert not job1.task_runner.run_as_user job_runner.heartbeat_callback() + ti = session.get(TaskInstance, (ti.id,)) + assert ti.last_heartbeat_at == DEFAULT_DATE + @pytest.mark.skip_if_database_isolation_mode # Does not work in db isolation mode @mock.patch("subprocess.check_call") @mock.patch("airflow.jobs.local_task_job_runner.psutil") diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py index 311de0ce2b64..da3ccc201eb4 100644 --- a/tests/jobs/test_scheduler_job.py +++ b/tests/jobs/test_scheduler_job.py @@ -50,7 +50,6 @@ from airflow.executors.executor_constants import MOCK_EXECUTOR from airflow.executors.executor_loader import ExecutorLoader from airflow.jobs.job import Job, run_job -from airflow.jobs.local_task_job_runner import LocalTaskJobRunner from airflow.jobs.scheduler_job_runner import SchedulerJobRunner from airflow.models.asset import AssetActive, AssetDagRunQueue, AssetEvent, AssetModel from airflow.models.backfill import Backfill, _create_backfill @@ -68,7 +67,7 @@ from airflow.utils import timezone from airflow.utils.file import list_py_file_paths from airflow.utils.session import create_session, provide_session -from airflow.utils.state import DagRunState, JobState, State, TaskInstanceState +from airflow.utils.state import DagRunState, State, TaskInstanceState from airflow.utils.types import DagRunType from tests.listeners import dag_listener @@ -5665,16 +5664,10 @@ def test_find_and_purge_zombies(self, load_examples, session): for task_id in tasks_to_setup: task = dag.get_task(task_id=task_id) ti = TaskInstance(task, run_id=dag_run.run_id, state=State.RUNNING) - ti.queued_by_job_id = 999 - - local_job = Job(dag_id=ti.dag_id) - LocalTaskJobRunner(job=local_job, task_instance=ti) - local_job.state = TaskInstanceState.FAILED - session.add(local_job) - session.flush() + ti.last_heartbeat_at = timezone.utcnow() - timedelta(minutes=6) + ti.queued_by_job_id = 999 - ti.job_id = local_job.id session.add(ti) session.flush() @@ -5733,13 +5726,6 @@ def test_zombie_message(self, load_examples): ti = TaskInstance(task, run_id=dag_run.run_id, state=State.RUNNING) ti.queued_by_job_id = 999 - local_job = Job(dag_id=ti.dag_id) - local_job.state = TaskInstanceState.FAILED - - session.add(local_job) - session.flush() - - ti.job_id = local_job.id session.add(ti) session.flush() @@ -5795,17 +5781,11 @@ def test_find_zombies_handle_failure_callbacks_are_correctly_passed_to_dag_proce task = dag.get_task(task_id="run_this_last") ti = TaskInstance(task, run_id=dag_run.run_id, state=State.RUNNING) - - local_job = Job(dag_id=ti.dag_id) - LocalTaskJobRunner(job=local_job, task_instance=ti) - local_job.state = JobState.FAILED - session.add(local_job) - session.flush() + ti.last_heartbeat_at = timezone.utcnow() - timedelta(minutes=6) # TODO: If there was an actual Relationship between TI and Job # we wouldn't need this extra commit session.add(ti) - ti.job_id = local_job.id session.flush() scheduler_job = Job() diff --git a/tests/models/test_taskinstance.py b/tests/models/test_taskinstance.py index ccd19ad3272f..8a1df0594e4e 100644 --- a/tests/models/test_taskinstance.py +++ b/tests/models/test_taskinstance.py @@ -3993,7 +3993,6 @@ def test_refresh_from_db(self, create_task_instance): "hostname": "some_unique_hostname", "id": str(uuid6.uuid7()), "unixname": "some_unique_unixname", - "job_id": 1234, "pool": "some_fake_pool_id", 
"pool_slots": 25, "queue": "some_queue_id", @@ -4004,6 +4003,7 @@ def test_refresh_from_db(self, create_task_instance): "rendered_map_index": None, "queued_by_job_id": 321, "pid": 123, + "last_heartbeat_at": run_date + datetime.timedelta(hours=1, seconds=4), "executor": "some_executor", "executor_config": {"Some": {"extra": "information"}}, "external_executor_id": "some_executor_id", diff --git a/tests/www/views/test_views_tasks.py b/tests/www/views/test_views_tasks.py index fabd104e8c26..19caafe55bc6 100644 --- a/tests/www/views/test_views_tasks.py +++ b/tests/www/views/test_views_tasks.py @@ -1109,7 +1109,7 @@ def test_task_instances(admin_client): "external_executor_id": None, "hostname": "", "id": unittest.mock.ANY, # Ignore the `id` field - "job_id": None, + "last_heartbeat_at": None, "map_index": -1, "max_tries": 0, "next_kwargs": None, @@ -1145,7 +1145,7 @@ def test_task_instances(admin_client): "external_executor_id": None, "hostname": "", "id": unittest.mock.ANY, # Ignore the `id` field - "job_id": None, + "last_heartbeat_at": None, "map_index": -1, "max_tries": 0, "next_kwargs": None, @@ -1181,7 +1181,7 @@ def test_task_instances(admin_client): "external_executor_id": None, "hostname": "", "id": unittest.mock.ANY, # Ignore the `id` field - "job_id": None, + "last_heartbeat_at": None, "map_index": -1, "max_tries": 0, "next_kwargs": None, @@ -1217,7 +1217,7 @@ def test_task_instances(admin_client): "external_executor_id": None, "hostname": "", "id": unittest.mock.ANY, # Ignore the `id` field - "job_id": None, + "last_heartbeat_at": None, "map_index": -1, "max_tries": 0, "next_kwargs": None, @@ -1253,7 +1253,7 @@ def test_task_instances(admin_client): "external_executor_id": None, "hostname": "", "id": unittest.mock.ANY, # Ignore the `id` field - "job_id": None, + "last_heartbeat_at": None, "map_index": -1, "max_tries": 0, "next_kwargs": None, @@ -1289,7 +1289,7 @@ def test_task_instances(admin_client): "external_executor_id": None, "hostname": "", "id": unittest.mock.ANY, # Ignore the `id` field - "job_id": None, + "last_heartbeat_at": None, "map_index": -1, "max_tries": 0, "next_kwargs": None, @@ -1325,7 +1325,7 @@ def test_task_instances(admin_client): "external_executor_id": None, "hostname": "", "id": unittest.mock.ANY, # Ignore the `id` field - "job_id": None, + "last_heartbeat_at": None, "map_index": -1, "max_tries": 0, "next_kwargs": None, From 45b0b2f15c57dae4f2331a66a9a921cb17385220 Mon Sep 17 00:00:00 2001 From: Elad Kalif <45845474+eladkal@users.noreply.github.com> Date: Sun, 3 Nov 2024 10:02:59 +0200 Subject: [PATCH 013/137] Prepare docs for Oct 2nd wave of providers rc3 (#43613) --- .../commits.rst | 108 ++++++++++++++++-- docs/apache-airflow-providers-fab/commits.rst | 46 +++++--- .../airflow/providers/amazon/CHANGELOG.rst | 2 + .../src/airflow/providers/fab/CHANGELOG.rst | 8 +- 4 files changed, 138 insertions(+), 26 deletions(-) diff --git a/docs/apache-airflow-providers-amazon/commits.rst b/docs/apache-airflow-providers-amazon/commits.rst index a1cb9ecca530..461ff00d2259 100644 --- a/docs/apache-airflow-providers-amazon/commits.rst +++ b/docs/apache-airflow-providers-amazon/commits.rst @@ -38,11 +38,13 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-31 ``Standard provider python operator (#42081)`` +`5886016243 `_ 2024-10-31 ``Prepare docs for Oct 2nd wave of providers RC2 (#43540)`` `d8c7d28411 `_ 2024-10-30 ``Start porting DAG definition code to the Task SDK (#43076)`` `39dba929bb `_ 2024-10-29 ``fix(providers/amazon): 
alias is_authorized_dataset to is_authorized_asset (#43470)`` `9811f1d6d0 `_ 2024-10-28 ``Limit mypy-boto3-appflow (#43436)`` @@ -68,19 +70,24 @@ Commit ================================================================================================= =========== ======================================================================================================== `2bb8628463 `_ 2024-10-09 ``Prepare docs for Oct 1st adhoc wave of providers (#42862)`` `d395c1f1f8 `_ 2024-10-09 ``Revert "Remove 'sqlalchemy-redshift' dependency from Amazon provider (#42830)" (#42864)`` +`a5ffbbda17 `_ 2024-10-09 ``Standard provider bash operator (#42252)`` `3b4c73a644 `_ 2024-10-08 ``Remove 'sqlalchemy-redshift' dependency from Amazon provider (#42830)`` +`b98c620a38 `_ 2024-10-08 ``Improve docs on adding Google secrets in AWS Secrets Manager (#42832)`` `63ff22f403 `_ 2024-10-08 ``Drop python3.8 support core and providers (#42766)`` `1cb9294c64 `_ 2024-10-06 ``Removed conditional check for task context logging in airflow version 2.8.0 and above (#42764)`` `ede7cb27fd `_ 2024-09-30 ``Rename dataset related python variable names to asset (#41348)`` `568162263d `_ 2024-09-26 ``Remove deprecated stuff from Amazon provider package (#42450)`` +`9d01d0354b `_ 2024-09-26 ``Move ECS executor to stable (#42483)`` `ac0711f0f1 `_ 2024-09-26 ``'S3DeleteObjects' Operator: Handle dates passed as strings (#42464)`` `f6852c2c55 `_ 2024-09-25 ``Remove identity center auth manager cli (#42481)`` `663da777f9 `_ 2024-09-25 ``Small fix to AWS AVP cli init script (#42479)`` `18c54bcb42 `_ 2024-09-25 ``#42442 Make the AWS logging faster by reducing the amount of sleep (#42449)`` +`55fe75d083 `_ 2024-09-25 ``Purge existing SLA implementation (#42285)`` `d87f9b0505 `_ 2024-09-25 ``Refactor AWS Auth manager user output (#42454)`` `ab3429c318 `_ 2024-09-24 ``Add STOPPED to the failure cases for Sagemaker Training Jobs (#42423)`` `8580e6d046 `_ 2024-09-24 ``Support session reuse in 'RedshiftDataOperator' (#42218)`` `4c8c72f4a4 `_ 2024-09-24 ``Fix logout in AWS auth manager (#42447)`` +`226adcde62 `_ 2024-09-24 ``Simple auth manager documentation (#42390)`` `b9629d99b2 `_ 2024-09-24 ``fix(providers/amazon): handle ClientError raised after key is missing during table.get_item (#42408)`` ================================================================================================= =========== ======================================================================================================== @@ -96,6 +103,7 @@ Commit `4afc2569b1 `_ 2024-09-16 ``ECSExecutor: Drop params that aren't compatible with EC2 (#42228)`` `5fb0d65aaf `_ 2024-09-11 ``Adding support for volume configurations in ECSRunTaskOperator (#42087)`` `1bde32ab7b `_ 2024-09-11 ``Actually move saml to amazon provider (mistakenly added in papermill) (#42148)`` +`ff718bd7e3 `_ 2024-09-09 ``Aws executor docs update (#42092)`` `c7fd6b445f `_ 2024-09-06 ``Fix 'GlueDataBrewStartJobOperator' template fields (#42073)`` `12bb8b3524 `_ 2024-09-05 ``Use base aws classes in AWS Glue DataBrew Operators/Triggers (#41848)`` `bfbff66a20 `_ 2024-09-04 ``validate aws service exceptions in waiters (#41941)`` @@ -201,6 +209,7 @@ Commit `81c331e29a `_ 2024-06-17 ``Update pandas minimum requirement for Python 3.12 (#40272)`` `6f4098487d `_ 2024-06-14 ``openlineage, redshift: do not call DB for schemas below Airflow 2.10 (#40197)`` `835f28c8b9 `_ 2024-06-12 ``Lazy match escaped quotes in 'RedshiftToS3Operator' (#40206)`` +`c2a93eabd1 `_ 2024-06-12 ``Update AWS Executor documentation (#39920)`` 
`15178b6953 `_ 2024-06-08 ``Use stdlib 'importlib.metadata' for retrieve 'botocore' package version (#40137)`` `22bd188439 `_ 2024-06-08 ``Fix 'importlib_metadata' import in aws utils (#40134)`` ================================================================================================= =========== ==================================================================================== @@ -245,6 +254,7 @@ Commit `a78ee74b6a `_ 2024-05-22 ``bugfix: handle invalid cluster states in NeptuneStopDbClusterOperator (#38287)`` `49b38719e2 `_ 2024-05-21 ``Fix automatic termination issue in 'EmrOperator' by ensuring 'waiter_max_attempts' is set for deferrable triggers (#38658)`` `8d1bd345b2 `_ 2024-05-17 ``fix: empty openlineage dataset name for AthenaExtractor (#39677)`` +`4d0c7242bc `_ 2024-05-16 ``Small refactor for example_bedrock_knowledge_base.py (#39672)`` `f3687b68a6 `_ 2024-05-16 ``Sagemaker trigger: pass the job name as part of the event (#39671)`` `9ea78d9d72 `_ 2024-05-15 ``Fix default value for aws batch operator retry strategy (#39608)`` `9284dc5391 `_ 2024-05-15 ``Amazon Bedrock - Retrieve and RetrieveAndGenerate (#39500)`` @@ -267,6 +277,7 @@ Commit `c9f4618be4 `_ 2024-05-09 ``Drop 'xmlsec' pin (#39534)`` `3938f71dfa `_ 2024-05-08 ``Scheduler to handle incrementing of try_number (#39336)`` `73918925ed `_ 2024-05-08 ``Simplify 'airflow_version' imports (#39497)`` +`820bfbba17 `_ 2024-05-08 ``fix broken link (#39489)`` `9c739d7142 `_ 2024-05-07 ``'S3DeleteObjectsOperator' Added ability to filter keys by last modified time (#39151)`` `a74b5f0694 `_ 2024-05-06 ``ECS Executor: Set tasks to RUNNING state once active (#39212)`` `598398a816 `_ 2024-05-02 ``Amazon Bedrock - Knowledge Bases and Data Sources (#39245)`` @@ -290,9 +301,11 @@ Commit `fcb2bee1c6 `_ 2024-04-22 ``Update logic to allow retries in AWS Batch Client hook to be effective (#38998)`` `4b9b400cff `_ 2024-04-18 ``Allow importing the aws executors with a shorter path (#39093)`` `cd888870ba `_ 2024-04-18 ``Fix main failing because of the new xmlsec 1.3.14 compatibility (#39104)`` +`4a288460a5 `_ 2024-04-16 ``Add examples in AWS auth manager documentation (#39040)`` `1ded297509 `_ 2024-04-15 ``Remove flag from AWS auth manager to use it (#39033)`` `6520653c43 `_ 2024-04-14 ``Activate RUF015 that checks for unnecessary iterable allocation for first element (#38949)`` `1c9a6609f3 `_ 2024-04-14 ``Adding MSGraphOperator in Microsoft Azure provider (#38111)`` +`3eac9778f6 `_ 2024-04-14 ``AWS auth manager documentation: manage environment section (#38932)`` `d946d7b013 `_ 2024-04-13 ``Fix bug in GlueJobOperator where consecutive runs fail when a local script file is used (#38960)`` `c25d346adf `_ 2024-04-11 ``Amazon Bedrock - Model Throughput Provisioning (#38850)`` ================================================================================================= =========== ============================================================================================================ @@ -313,7 +326,10 @@ Commit `1f03b9c86c `_ 2024-04-05 ``Typo fix (#38783)`` `12944002aa `_ 2024-04-03 ``Add fallback 'region_name' value to AWS Executors (#38704)`` `0723a8f01d `_ 2024-03-30 ``Introduce Amazon Bedrock service (#38602)`` +`b47b5ff55a `_ 2024-03-26 ``Complete AWS auth manager documentation: configuration of Amazon Verified Permissions (#38409)`` `ff28969ff3 `_ 2024-03-25 ``fix: EmrServerlessStartJobOperator not serializing DAGs correctly when partial/expand is used. 
(#38022)`` +`0d11f3ca2f `_ 2024-03-22 ``Add experimental warning in AWS auth manager documentation (#38414)`` +`83d62cae69 `_ 2024-03-22 ``Create AWS auth manager documentation. Part: setup identity center (#38273)`` `9ea4050d41 `_ 2024-03-22 ``fix(amazon): add return statement to yield within a while loop in triggers (#38396)`` `c893cb3bfb `_ 2024-03-22 ``Fix set deprecated amazon operators arguments in 'MappedOperator' (#38346)`` `ea951afb69 `_ 2024-03-21 ``Add check in AWS auth manager to check if the Amazon Verified Permissions schema is up to date (#38333)`` @@ -385,8 +401,10 @@ Commit `bfb054e9e8 `_ 2024-02-12 ``Prepare docs 1st wave of Providers February 2024 (#37326)`` `8fac799a25 `_ 2024-02-10 ``D401 support in amazon provider (#37275)`` `48bfb1a970 `_ 2024-02-09 ``Merge all ECS executor configs following recursive python dict update (#37137)`` +`988da34fab `_ 2024-02-08 ``replace .rst with .html in links (#37265)`` `90e2b12d6b `_ 2024-02-08 ``Upgrade mypy to 1.8.0 (#36428)`` `1f1fa2f7e3 `_ 2024-02-07 ``Update default value for 'BatchSensor' (#37234)`` +`8ef1830dda `_ 2024-02-07 ``ECS Executor doc fix (#37210)`` `d8ce75cd53 `_ 2024-02-07 ``remove info log from download_file (#37211)`` `af30e1b619 `_ 2024-02-06 ``feature: S3ToRedshiftOperator templating aws_conn_id (#37195)`` `41ebf28103 `_ 2024-02-05 ``ECS Executor - Add backoff on failed task retry (#37109)`` @@ -412,6 +430,7 @@ Commit `2b4da0101f `_ 2024-01-22 ``Prepare docs 2nd wave of Providers January 2024 (#36945)`` `d18c01a7a2 `_ 2024-01-18 ``EC2 'CreateInstance': terminate instances in on_kill (#36828)`` `9563dc573b `_ 2024-01-18 ``add deferrable mode to RedshiftDataOperator (#36586)`` +`2d330cb487 `_ 2024-01-18 ``Fix documentation on IAM permissions for 'EcsRunTaskOperator' (#36865)`` `f6e357a5fc `_ 2024-01-18 ``Fallback to default value if '[aws] cloudwatch_task_handler_json_serializer' not set (#36851)`` `6ff96af480 `_ 2024-01-18 ``Fix stacklevel in warnings.warn into the providers (#36831)`` `666127238b `_ 2024-01-17 ``AthenaSqlHook implementation (#36171)`` @@ -425,6 +444,7 @@ Commit `b241577c9c `_ 2024-01-14 ``Use base aws classes in Amazon DynamoDB Sensors (#36770)`` `1455a3babb `_ 2024-01-14 ``Use base aws classes in AWS CloudFormation Operators/Sensors (#36771)`` `e7166bb759 `_ 2024-01-11 ``Adds support for capacity providers to ECS Executor (#36722)`` +`25543846b8 `_ 2024-01-11 ``docs: List permissions required to use S3 logging (#36736)`` `b260367208 `_ 2024-01-11 ``check transform job status before deferring SageMakerTransformOperator (#36680)`` `c439ab87c4 `_ 2024-01-10 ``Standardize airflow build process and switch to Hatchling build backend (#36537)`` `9f04716179 `_ 2024-01-10 ``Add use_regex argument for allowing 'S3KeySensor' to check s3 keys with regular expression (#36578)`` @@ -437,6 +457,7 @@ Commit `4d5e05b28b `_ 2024-01-08 ``Add deferrable mode to RedshiftClusterSensor (#36550)`` `298c37d355 `_ 2024-01-08 ``Bump min version of amazon-provider related dependencies (#36660)`` `43afb2f6b8 `_ 2024-01-08 ``Implement 'is_authorized_dag' in AWS auth manager (#36619)`` +`90fb783953 `_ 2024-01-08 ``Update EMR index.rst (#36665)`` ================================================================================================= =========== =================================================================================================================== 8.16.0 @@ -452,6 +473,7 @@ Commit `365f206a32 `_ 2024-01-05 ``Select ruff B006 to detect the usage of mutable values as argument default (#36626)`` `16d16e2933 
`_ 2024-01-05 ``Use base aws classes in Amazon SQS Operators/Sensors/Triggers (#36613)`` `034e618347 `_ 2024-01-05 ``Use base aws classes in Amazon SNS Operators (#36615)`` +`16361f18d8 `_ 2024-01-05 ``Fix broken link for Redshift connection (#36614)`` `c1bba9906c `_ 2024-01-05 ``Fix assignment of template field in '__init__' in 'AwsToAwsBaseOperator' (#36604)`` `a877bde5a3 `_ 2024-01-05 ``Fix assignment of template field in '__init__' in 'DataSyncOperator' (#36605)`` `ebd588b9f4 `_ 2024-01-04 ``Add AWS Step Functions links (#36599)`` @@ -487,6 +509,7 @@ Commit `f5883d6e7b `_ 2023-12-23 ``Prepare 2nd wave of providers in December (#36373)`` `0b32613480 `_ 2023-12-21 ``Increase ConflictException retries to 4 total (#36337)`` `30afa46e90 `_ 2023-12-21 ``Increase width of execution_date input in trigger.html (#36278) (#36304)`` +`e718395714 `_ 2023-12-20 ``Remove notifiers compatibility note (#36323)`` `2b31f373ef `_ 2023-12-20 ``Remove remaining Airflow 2.6 backcompat code from Amazon Provider (#36324)`` `e9ba37bb58 `_ 2023-12-17 ``Add code snippet formatting in docstrings via Ruff (#36262)`` `357355ac09 `_ 2023-12-11 ``Remove 'is_authorized_cluster_activity' from auth manager (#36175)`` @@ -526,6 +549,7 @@ Commit `ab835c20b2 `_ 2023-11-29 ``Fix EC2Hook get_instance for client_type api (#35960)`` `9ab343714a `_ 2023-11-29 ``Remove setting a non-existing object param and use local var instead in S3Hook (#35950)`` `f6962a929b `_ 2023-11-28 ``Support IAM authentication for Redshift serverless (#35897)`` +`16585b178f `_ 2023-11-28 ``Update redshift_sql.rst to reference Redshift connection (#35902)`` `3b3ebafdce `_ 2023-11-27 ``Implement 'is_authorized_variable' in AWS auth manager (#35804)`` `0f5db49ec4 `_ 2023-11-27 ``Stop getting message from event after migrating 'EmrContainerTrigger' to 'AwsBaseWaiterTrigger' (#35892)`` `9059f72668 `_ 2023-11-25 ``Enhance 'attribute_value' in 'DynamoDBValueSensor' to accept list (#35831)`` @@ -574,6 +598,8 @@ Commit `11bdfe4c12 `_ 2023-11-07 ``Work around typing issue in examples and providers (#35494)`` `f24e5199c7 `_ 2023-11-06 ``Fix AWS RDS hook's DB instance state check (#34773)`` `a61da3cc87 `_ 2023-11-04 ``Add verificationy that provider docs are as expected (#35424)`` +`706878ec35 `_ 2023-11-04 ``Remove empty lines in generated changelog (#35436)`` +`052e26ad47 `_ 2023-11-04 ``Change security.rst to use includes in providers (#35435)`` `ae9a7b8188 `_ 2023-11-03 ``ECS Executor Health Check (#35412)`` `92d1e8c447 `_ 2023-11-03 ``Move ECS Executor to its own file (#35418)`` `9782ee3bbc `_ 2023-11-01 ``Fix parameter syntax in Amazon docstrings (#35349)`` @@ -632,6 +658,7 @@ Commit ================================================================================================= =========== =================================================================================================================== `e9987d5059 `_ 2023-10-13 ``Prepare docs 1st wave of Providers in October 2023 (#34916)`` `545e4d505e `_ 2023-10-12 ``Extend hooks arguments into 'AwsBaseWaiterTrigger' (#34884)`` +`b9cef1ddea `_ 2023-10-12 ``Add general links about TCP keepalive into AWS Lambda docs (#34870)`` `84a3daed86 `_ 2023-10-11 ``Implements 'AwsBaseOperator' and 'AwsBaseSensor' (#34784)`` `8e26865763 `_ 2023-10-06 ``Refactor consolidate import from io in providers (#34378)`` `c01abd1c2e `_ 2023-10-06 ``Upgrade watchtower to 3.0.1 (#25019) (#34747)`` @@ -665,6 +692,7 @@ Commit `7de7149bc6 `_ 2023-09-19 ``Works on #34425 (#34453)`` `bbfa228868 `_ 2023-09-18 ``Deprecate get_hook in 
DataSyncOperator and use hook instead (#34427)`` `8ecd576de1 `_ 2023-09-14 ``Refactor shorter defaults in providers (#34347)`` +`0dc479906d `_ 2023-09-14 ``fix Amazon index.rst doc (#34368)`` ================================================================================================= =========== =================================================================================================================== 8.7.1 @@ -690,9 +718,9 @@ Commit Latest change: 2023-09-08 -================================================================================================= =========== ==================================================================================================== +================================================================================================= =========== ======================================================================================================== Commit Committed Subject -================================================================================================= =========== ==================================================================================================== +================================================================================================= =========== ======================================================================================================== `21990ed894 `_ 2023-09-08 ``Prepare docs for 09 2023 - 1st wave of Providers (#34201)`` `4fa66d1700 `_ 2023-09-07 ``Refactor: Consolidate import and usage of random (#34108)`` `9079093291 `_ 2023-09-07 ``Consolidate importing of os.path.* (#34060)`` @@ -705,7 +733,10 @@ Commit `6eaa69ee46 `_ 2023-09-01 ``Make 'aws.session_factory' part of Amazon provider configuration documentation (#33960)`` `875387afa5 `_ 2023-09-01 ``Refactor unneeded jumps in providers (#33833)`` `c4967b00c4 `_ 2023-09-01 ``Make Amazon Chime connection lazy loaded and consistent with docs (#34000)`` +`9144308b3d `_ 2023-09-01 ``Fix examples for Amazon notifiers (#34009)`` `a9bbb430fc `_ 2023-09-01 ``Replace try - except pass by contextlib.suppress in providers (#33980)`` +`687977f8f3 `_ 2023-09-01 ``docs(providers): replace markdown style link with rst style link for amazon and apache-beam (#33992)`` +`4610df1ecf `_ 2023-09-01 ``Reformat Apprise and Amazon Chime connections docs (#33971)`` `0a5e2281e0 `_ 2023-08-31 ``Remove some useless try/except from providers code (#33967)`` `17d031df66 `_ 2023-08-31 ``Add Amazon SQS Notifier (#33962)`` `097e3e618e `_ 2023-08-31 ``Fix AWS 'EmrStepSensor' ignoring the specified 'aws_conn_id' in deferred mode (#33952)`` @@ -721,22 +752,23 @@ Commit `cede385e93 `_ 2023-08-28 ``Always use 'Literal' from 'typing_extensions' (#33794)`` `b11525702c `_ 2023-08-26 ``Use literal dict instead of calling dict() in providers (#33761)`` `353b1482d3 `_ 2023-08-26 ``remove unnecessary and rewrite it using list in providers (#33763)`` -================================================================================================= =========== ==================================================================================================== +================================================================================================= =========== ======================================================================================================== 8.6.0 ..... 
Latest change: 2023-08-26 -================================================================================================= =========== ============================================================================================= +================================================================================================= =========== ========================================================================================================================== Commit Committed Subject -================================================================================================= =========== ============================================================================================= +================================================================================================= =========== ========================================================================================================================== `c077d19060 `_ 2023-08-26 ``Prepare docs for Aug 2023 3rd wave of Providers (#33730)`` `633217c842 `_ 2023-08-25 ``Simplify conditions on len() in providers/amazon (#33565)`` `83efcaa835 `_ 2023-08-24 ``always push ECS task ARN to xcom in 'EcsRunTaskOperator' (#33703)`` `2d8625253f `_ 2023-08-24 ``Remove non-public interface usage in EcsRunTaskOperator (#29447)`` `85acbb4ae9 `_ 2023-08-24 ``Refactor: Remove useless str() calls (#33629)`` `53a8973952 `_ 2023-08-24 ``Fix bug in task logs when using AWS CloudWatch. Do not set 'start_time' (#33673)`` +`12ee795bbf `_ 2023-08-23 ``AWS Connection Docs: Remove deprecation information due to undeprecate wrong 'aws_conn_id' fallback behaviour (#33636)`` `94f70d8184 `_ 2023-08-23 ``Replace strftime with f-strings where nicer (#33455)`` `5f504e9a17 `_ 2023-08-23 ``Upgrade botocore/aiobotocore minimum requirements (#33649)`` `85aea74b64 `_ 2023-08-23 ``Fix AWS Batch waiter failure state (#33656)`` @@ -755,7 +787,7 @@ Commit `bd11ea81e5 `_ 2023-08-13 ``Add Deferrable mode to GlueCatalogPartitionSensor (#33239)`` `c645d8e40c `_ 2023-08-12 ``D401 Support - Providers: Airbyte to Atlassian (Inclusive) (#33354)`` `0df0d7457e `_ 2023-08-11 ``Add new RdsStartExportTaskOperator parameters (#33251)`` -================================================================================================= =========== ============================================================================================= +================================================================================================= =========== ========================================================================================================================== 8.5.1 ..... 
@@ -770,8 +802,10 @@ Commit `c14cb85f16 `_ 2023-08-10 ``Improve fetching logs from AWS (#33231)`` `83bd60fd97 `_ 2023-08-09 ``Refactor: Simplify code in providers/amazon (#33222)`` `741fb27f50 `_ 2023-08-08 ``Implement EventBridge enable and disable rule operators (#33226)`` +`569e32b26f `_ 2023-08-08 ``AWS Authentication docs updated with generic path for credentials file (#33182)`` `8bbea9217a `_ 2023-08-08 ``Get failure information on EMR job failure (#32151)`` `46862cfa94 `_ 2023-08-06 ``Update mypy-boto3-appflow dependency (#32930)`` +`b672ba478c `_ 2023-08-06 ``Fix edge cases of "migrate/create-default-connections" (#33136)`` `76ca94d2f2 `_ 2023-08-05 ``use 'cached_property' from functools in 'RdsBaseOperator' (#33133)`` `03fcbcc96e `_ 2023-08-05 ``Use set for 'template_fields' of 'EcsDeregisterTaskDefinitionOperator' (#33129)`` ================================================================================================= =========== =================================================================================== @@ -788,6 +822,7 @@ Commit `519d99baee `_ 2023-08-04 ``Check google provider version in GCSToS3Operator before provide match_glob param (#32925)`` `bbc0968905 `_ 2023-08-04 ``openlineage, sagemaker: add missing OpenLineage type signature (#33114)`` `087d14ada2 `_ 2023-08-03 ``Deferrable mode for Sqs Sensor (#32809)`` +`e9a2bb3d4a `_ 2023-08-03 ``Extract sagemaker pipeline to their own system test (#33086)`` `4e42edb203 `_ 2023-08-03 ``Set longer default 'waiter_max_attempts' for deferred BatchJobOperator (#33045)`` `44234c2bf0 `_ 2023-08-03 ``Add S3Bucket for mypy (#33028)`` `62f9e68a54 `_ 2023-08-02 ``openlineage, sagemaker: add OpenLineage support for SageMaker's Processing, Transform and Training operators (#31816)`` @@ -813,6 +848,7 @@ Commit `282854b55f `_ 2023-07-24 ``Add endpoint_url in test_connection (#32664)`` `8012c9fce6 `_ 2023-07-24 ``Add support for querying Redshift Serverless clusters (#32785)`` `57f203251b `_ 2023-07-22 ``FIX AWS deferrable operators by using AioCredentials when using 'assume_role' (#32733)`` +`73b90c48b1 `_ 2023-07-21 ``Allow configuration to be contributed by providers (#32604)`` `815655101b `_ 2023-07-21 ``Add Deferrable mode to StepFunctionStartExecutionOperator (#32563)`` `75ed3bc3f8 `_ 2023-07-20 ``[bugfix] fix AWS triggers where deserialization would crash if region was not specified (#32729)`` `27b5f696a4 `_ 2023-07-20 ``Add Deferrable mode for EMR Serverless Start Job Operator (#32534)`` @@ -856,10 +892,12 @@ Commit `f8593503cb `_ 2023-07-05 ``Add default_deferrable config (#31712)`` `5623a21a1f `_ 2023-07-04 ``deprecate arbitrary parameter passing to RDS hook (#32352)`` `575bf2f040 `_ 2023-07-04 ``Bug fix GCSToS3Operator: avoid 'ValueError' when 'replace=False' with files already in S3 (#32322)`` +`702209898e `_ 2023-07-04 ``typo fix (#32317)`` `17132ef4c6 `_ 2023-07-04 ``Added 'AzureBlobStorageToS3Operator' transfer operator (#32270)`` `5c72befcfd `_ 2023-07-03 ``Fix 'LambdaInvokeFunctionOperator' payload parameter type (#32259)`` `d6e254db68 `_ 2023-06-30 ``Deprecate 'delimiter' param and source object's wildcards in GCS, introduce 'match_glob' param. 
(#31261)`` `dd937e51fe `_ 2023-06-30 ``Add 'on_finish_action' to 'KubernetesPodOperator' (#30718)`` +`779226706c `_ 2023-06-29 ``Remove duplicate and unsupported keys unchanged task (#32267)`` `e781aef1a7 `_ 2023-06-29 ``aws waiter util: log status info with error level on waiter error (#32247)`` `a46b942f48 `_ 2023-06-29 ``Add deferrable mode to S3KeysUnchangedSensor (#31940)`` `b1b69af88f `_ 2023-06-28 ``Add deferrable mode to 'RdsCreateDbInstanceOperator' and 'RdsDeleteDbInstanceOperator' (#32171)`` @@ -952,12 +990,14 @@ Commit `abea189022 `_ 2023-05-18 ``Use '__version__' in providers not 'version' (#31393)`` `f5aed58d9f `_ 2023-05-18 ``Fixing circular import error in providers caused by airflow version check (#31379)`` `0b3b6704cb `_ 2023-05-17 ``Fix AWS system test example_dynamodb_to_s3 (#31362)`` +`7ebda3898d `_ 2023-05-17 ``Fix missing line in index.rst for provider documentation (#31343)`` `e05e3a9db8 `_ 2023-05-16 ``Handle temporary credentials when resource_type is used to get custom waiters (#31333)`` `d9ff55cf6d `_ 2023-05-16 ``Prepare docs for May 2023 wave of Providers (#31252)`` `0117246db6 `_ 2023-05-15 ``Add future-compatible mongo Hook typing (#31289)`` `3193857376 `_ 2023-05-13 ``Add Deferrable Mode for EC2StateSensor (#31130)`` `6b21e4b88c `_ 2023-05-13 ``Bring back detection of implicit single-line string concatenation (#31270)`` `d6051fd10a `_ 2023-05-12 ``Add on_kill to EMR Serverless Job Operator (#31169)`` +`0d962c2967 `_ 2023-05-12 ``Add throttling section in EMR doc (#31224)`` `8a5fe6ad7d `_ 2023-05-10 ``Add retries to S3 delete_bucket (#31192)`` `cd3fa33e82 `_ 2023-05-09 ``DynamoDBToS3Operator - Add a feature to export the table to a point in time. (#31142)`` `4c9b5fe4c1 `_ 2023-05-09 ``Add deferrable param in SageMakerTransformOperator (#31063)`` @@ -973,6 +1013,7 @@ Commit `2d5166f982 `_ 2023-05-03 ``Add extras links to some more EMR Operators and Sensors (#31032)`` `0a30706aa7 `_ 2023-05-03 ``Use 'AirflowProviderDeprecationWarning' in providers (#30975)`` `eef5bc7f16 `_ 2023-05-03 ``Add full automation for min Airflow version for providers (#30994)`` +`bef892e9d2 `_ 2023-05-02 ``Update aws.rst (#31019)`` `e4d935e490 `_ 2023-05-02 ``Add tags param in RedshiftCreateClusterSnapshotOperator (#31006)`` `2f247a2ba2 `_ 2023-05-02 ``Add IAM authentication to Amazon Redshift Connection by AWS Connection (#28187)`` `9662fd8cc0 `_ 2023-05-01 ``bigfix: EMRHook Loop through paginated response to check for cluster id (#29732)`` @@ -980,6 +1021,7 @@ Commit `612676b975 `_ 2023-04-28 ``Support 'shareIdentifier' in BatchOperator (#30829)`` `1f0174931b `_ 2023-04-28 ``improve/fix glue job logs printing (#30886)`` `b36c33230f `_ 2023-04-28 ``BaseAWS - Override client when resource_type is user to get custom waiters (#30897)`` +`e5d304a570 `_ 2023-04-27 ``Improving Redshift index page (#30907)`` ================================================================================================= =========== ==================================================================================================== 8.0.0 @@ -1026,6 +1068,7 @@ Latest change: 2023-04-09 Commit Committed Subject ================================================================================================= =========== ========================================================================== `874ea9588e `_ 2023-04-09 ``Prepare docs for ad hoc release of Providers (#30545)`` +`653460b050 `_ 2023-04-08 ``Make AWS RDS operator page title consistent (#30536)`` `fa4fb1f5fe `_ 2023-04-05 ``Revert "Add AWS 
deferrable BatchOperator (#29300)" (#30489)`` `c1b5eafc82 `_ 2023-04-05 ``Add more info to quicksight error messages (#30466)`` `77c272e6e8 `_ 2023-04-05 ``Add AWS deferrable BatchOperator (#29300)`` @@ -1069,6 +1112,7 @@ Commit ================================================================================================= =========== ======================================================================================================================================= `fcd3c0149f `_ 2023-03-03 ``Prepare docs for 03/2023 wave of Providers (#29878)`` `76d8aaa836 `_ 2023-03-02 ``Fix Amazon ECS Enums (#29871)`` +`372ea492e5 `_ 2023-03-02 ``Update sftp_to_s3.rst (#29869)`` `faf792f634 `_ 2023-03-02 ``Implement custom boto waiters for some EMR operators (#29822)`` `ea8ce218b9 `_ 2023-03-02 ``Impovements for RedshiftDataOperator: better error reporting and an ability to return SQL results (#29434)`` `181a825259 `_ 2023-03-01 ``Use waiters in ECS Operators instead of inner sensors (#29761)`` @@ -1078,8 +1122,10 @@ Commit `df4abcbcfe `_ 2023-02-27 ``fix code checking job names in sagemaker (#29245)`` `38b901ec3f `_ 2023-02-24 ``Standardize AWS lambda naming (#29749)`` `5e006d743d `_ 2023-02-23 ``Avoid emitting fallback message for S3TaskHandler if streaming logs (#29708)`` +`8b178f185a `_ 2023-02-22 ``Clarify 'service_config' in AWS Connection (#29627)`` `ba2d562cfb `_ 2023-02-22 ``Add 'wait_for_completion' param in 'RedshiftCreateClusterOperator' (#29657)`` `6c13f04365 `_ 2023-02-22 ``AWS Glue job hook: Make s3_bucket parameter optional (#29659)`` +`37a317286a `_ 2023-02-21 ``docs: fix typo (#29658)`` `45419e23a9 `_ 2023-02-20 ``'RedshiftDataOperator' replace 'await_result' with 'wait_for_completion' (#29633)`` `0604033829 `_ 2023-02-20 ``Add Amazon Redshift-data to S3<>RS Transfer Operators (#27947)`` ================================================================================================= =========== ======================================================================================================================================= @@ -1104,11 +1150,12 @@ Commit Latest change: 2023-02-08 -================================================================================================= =========== ====================================================================================== +================================================================================================= =========== ============================================================================================= Commit Committed Subject -================================================================================================= =========== ====================================================================================== +================================================================================================= =========== ============================================================================================= `ce6ae2457e `_ 2023-02-08 ``Prepare docs for Feb 2023 wave of Providers (#29379)`` `1b18a501fe `_ 2023-02-03 ``Enable individual trigger logging (#27758)`` +`8338926f12 `_ 2023-02-03 ``Add clarification about AWS Connection behaviour (#29341)`` `cb0c90bd66 `_ 2023-02-02 ``Decrypt SecureString value obtained by SsmHook (#29142)`` `efc8857d55 `_ 2023-02-01 ``log the observed status in redshift sensor (#29274)`` `a671561b8e `_ 2023-01-31 ``shorten other wait times in sys tests (#29254)`` @@ -1118,15 +1165,17 @@ Commit `44024564cb `_ 2023-01-27 ``fix: 'num_of_dpus' typehints- 
GlueJobHook/Operator (#29176)`` `e1a14ae9ee `_ 2023-01-27 ``Fix false-positive spellcheck failure (#29190)`` `2493476a7e `_ 2023-01-26 ``introduce base class for EKS sensors (#29053)`` +`9b643c7201 `_ 2023-01-25 ``Add AWS system test dashboard in Amazon provider package landing page and README (#29170)`` `2c4928da40 `_ 2023-01-24 ``introduce a method to convert dictionaries to boto-style key-value lists (#28816)`` `efaed34213 `_ 2023-01-23 ``Add transfer operator S3 to (generic) SQL (#29085)`` `6190e34388 `_ 2023-01-23 ``add retries to stop_pipeline on conflict (#29077)`` `17e8bb7f9e `_ 2023-01-19 ``Update provide_bucket_name() decorator to handle new conn_type (#28706)`` `1ab7ea81a1 `_ 2023-01-19 ``uniformize getting hook through cached property in aws sensors (#29001)`` `395b731b94 `_ 2023-01-18 ``Add log for AWS Glue Job Console URL (#28925)`` +`d24527bf75 `_ 2023-01-16 ``Add documentation about cli 'add connection' and AWS connection URI (#28852)`` `3a3adfb8e6 `_ 2023-01-15 ``Fix typo in DataSyncHook boto3 methods for create location in NFS and EFS (#28948)`` `198e96aca1 `_ 2023-01-14 ``Use boto3 intersphinx inventory in documentation/docstrings. (#28945)`` -================================================================================================= =========== ====================================================================================== +================================================================================================= =========== ============================================================================================= 7.1.0 ..... @@ -1184,6 +1233,7 @@ Commit `0ef8d934f2 `_ 2022-12-06 ``Add operators + sensor for aws sagemaker pipelines (#27786)`` `7398853c86 `_ 2022-12-06 ``Update RdsHook docstrings to match correct argument names (#28108)`` `8f0265d0d9 `_ 2022-12-05 ``AWS Secrets Manager Backend - major update (#27920)`` +`c8e348dcb0 `_ 2022-12-05 ``Add automated version replacement in example dag indexes (#28090)`` `8cf6dca36b `_ 2022-12-05 ``add some important log in aws athena hook (#27917)`` `4a3a429658 `_ 2022-12-03 ``[misc] Get rid of 'pass' statement in conditions (#27775)`` `527b948856 `_ 2022-12-03 ``[misc] Replace XOR '^' conditions by 'exactly_one' helper in providers (#27858)`` @@ -1224,8 +1274,10 @@ Commit `f919abc918 `_ 2022-11-10 ``Fix backwards compatibility for RedshiftSQLOperator (#27602)`` `a5c0aeb566 `_ 2022-11-10 ``Add info about JSON Connection format for AWS SSM Parameter Store Secrets Backend (#27134)`` `2ef15c5da0 `_ 2022-11-10 ``Add default name to EMR Serverless jobs (#27458)`` +`bb6fce3138 `_ 2022-11-10 ``Change host to endpoint_url in amazon remote logging docs (#27572)`` `4dc9b1c592 `_ 2022-11-08 ``Sagemaker hook: remove extra call at the end when waiting for completion (#27551)`` `d479b99388 `_ 2022-11-07 ``Fix typo in redshift sql hook get_ui_field_behaviour (#27533)`` +`7aeaee93d1 `_ 2022-11-07 ``Add JSON format example for Amazon Web Services Connection (#27496)`` `5cd78cf425 `_ 2022-11-06 ``Upgrade dependencies in order to avoid backtracking (#27531)`` `531f2d2116 `_ 2022-11-04 ``Code quality improvements on sagemaker operators/hook (#27453)`` `1bbd8fe3ef `_ 2022-10-28 ``Fix example_emr_serverless system test (#27149)`` @@ -1236,6 +1288,7 @@ Commit `6a1a6f7bef `_ 2022-10-26 ``Adds s3_key_prefix to template fields (#27207)`` `78b8ea2f22 `_ 2022-10-24 ``Move min airflow version to 2.3.0 for all providers (#27196)`` `2a34dc9e84 `_ 2022-10-23 ``Enable string normalization in python formatting - providers (#27205)`` 
+`de9633f93a `_ 2022-10-22 ``Update google hooks to prefer non-prefixed extra fields (#27023)`` `737e50a02a `_ 2022-10-21 ``Fix assume role if user explicit set credentials (#26946)`` `0e8dcdc4ff `_ 2022-10-21 ``Add GlacierUploadArchiveOperator (#26652)`` `a2413cf6ca `_ 2022-10-19 ``Add RdsStopDbOperator and RdsStartDbOperator (#27076)`` @@ -1243,6 +1296,7 @@ Commit `9fed22fc99 `_ 2022-10-18 ``'GoogleApiToS3Operator' : add 'gcp_conn_id' to template fields (#27017)`` `ecd4d6654f `_ 2022-10-18 ``Add SQLExecuteQueryOperator (#25717)`` `f8d7290178 `_ 2022-10-10 ``Improve testing AWS Connection response (#26953)`` +`bd3d6d3ee7 `_ 2022-10-10 ``Added Doc - Airflow remote logging with S3 and IRSA (#25931)`` `66294de4e0 `_ 2022-10-10 ``Fix failure state in waiter call for EmrServerlessStartJobOperator. (#26853)`` `62d5bab3b4 `_ 2022-10-10 ``Convert emr_eks example dag to system test (#26723)`` `e68c8b9d52 `_ 2022-10-10 ``System test for Dynamo DB (#26729)`` @@ -1353,6 +1407,7 @@ Commit `d66e427c4d `_ 2022-07-22 ``Sagemaker System Tests - Part 3 of 3 - example_sagemaker_endpoint.py (AIP-47) (#25134)`` `f6bda38e20 `_ 2022-07-21 ``Convert RDS Export Sample DAG to System Test (AIP-47) (#25205)`` `47b72056c4 `_ 2022-07-21 ``SQSPublishOperator should allow sending messages to a FIFO Queue (#25171)`` +`5cd2d511e8 `_ 2022-07-19 ``Fix put-secret-value example (#25140)`` `6d41067cf7 `_ 2022-07-19 ``Standardize AwsLambda (#25100)`` `c6d9bccdb6 `_ 2022-07-19 ``AIP-47 - Migrate redshift DAGs to new design #22438 (#24239)`` `5a77c46bf0 `_ 2022-07-19 ``Glue Job Driver logging (#25142)`` @@ -1419,9 +1474,12 @@ Commit `b88ce95188 `_ 2022-06-07 ``Update doc and sample dag for EMR Containers (#24087)`` `c23826915d `_ 2022-06-07 ``Apply per-run log templates to log handlers (#24153)`` `41898d8922 `_ 2022-06-06 ``Refactor GlueJobHook get_or_create_glue_job method. 
(#24215)`` +`048b617474 `_ 2022-06-06 ``docs: amazon-provider retry modes (#23906)`` +`6015c39ce5 `_ 2022-06-06 ``adding AWS_DEFAULT_REGION to the docs, boto3 expects this to be in the env variables (#24181)`` `027b707d21 `_ 2022-06-05 ``Add explanatory note for contributors about updating Changelog (#24229)`` `daa138c8eb `_ 2022-06-04 ``fix: StepFunctionHook ignores explicit set 'region_name' (#23976)`` `5b32966c35 `_ 2022-06-03 ``Remove old Athena Sample DAG (#24170)`` +`719c2d4d9f `_ 2022-06-02 ``Convert Athena Sample DAG to System Test (#24058)`` `595981c8ad `_ 2022-05-31 ``Light Refactor and Clean-up AWS Provider (#23907)`` `94f2ce9342 `_ 2022-05-30 ``Add partition related methods to GlueCatalogHook: (#23857)`` `92ddcf4ac6 `_ 2022-05-27 ``Introduce 'flake8-implicit-str-concat' plugin to static checks (#23873)`` @@ -1510,6 +1568,7 @@ Commit ================================================================================================= =========== ================================================================================================================================================================================ `d7dbfb7e26 `_ 2022-03-22 ``Add documentation for bugfix release of Providers (#22383)`` `4de9d6622c `_ 2022-03-22 ``ImapAttachmentToS3Operator: fix it, update sample dag and update doc (#22351)`` +`c4a59bedce `_ 2022-03-22 ``Add default connection for redshift (#22263)`` `e972b6a48d `_ 2022-03-21 ``Add docs and example dag for AWS Glue (#22295)`` `dd0cbaad3f `_ 2022-03-21 ``Update doc and sample dag for S3ToSFTPOperator and SFTPToS3Operator (#22313)`` `5eb6335742 `_ 2022-03-19 ``Update sample dag and doc for S3CreateBucketOperator, S3PutBucketTaggingOperator, S3GetBucketTaggingOperator, S3DeleteBucketTaggingOperator, S3DeleteBucketOperator (#22312)`` @@ -1534,6 +1593,7 @@ Commit `46a120dc5f `_ 2022-03-11 ``Fix RedshiftDataOperator and update doc (#22157)`` `e63f6e36d1 `_ 2022-03-11 ``additional information in the ECSOperator around support of launch_type=EXTERNAL (#22093)`` `16ad03cae5 `_ 2022-03-11 ``Bugfix for retrying on provision failuers(#22137)`` +`b2e0f760de `_ 2022-03-08 ``Fix typo in AWS doc (#22097)`` `bd809bc83a `_ 2022-03-09 ``If uploading task logs to S3 fails, retry once (#21981)`` `45162565db `_ 2022-03-08 ``EMR on EKS Sample DAG and Docs Update (#22095)`` `c7286e5306 `_ 2022-03-08 ``Bug-fix GCSToS3Operator (#22071)`` @@ -1561,6 +1621,7 @@ Commit `cb24ee9414 `_ 2022-02-27 ``Add SageMakerDeleteModelOperator (#21673)`` `f0b6398dd6 `_ 2022-02-26 ``Bug Fix - S3DeleteObjectsOperator will try and delete all keys (#21458)`` `037865970b `_ 2022-02-26 ``Added Hook for Amazon RDS. Added 'boto3_stub' library for autocomplete. 
(#20642)`` +`272d242316 `_ 2022-02-24 ``Restore image rendering in AWS Secrets Manager Backend doc (#21772)`` `dec05fb6b2 `_ 2022-02-19 ``Update EKS sample DAGs and docs (#21523)`` `b28f4c578c `_ 2022-02-19 ``Fix Amazon SES emailer signature (#21681)`` `fc44836504 `_ 2022-02-15 ``S3KeySensor to use S3Hook url parser (#21500)`` @@ -1568,6 +1629,7 @@ Commit `21a90c5b7e `_ 2022-02-15 ``Get log events after sleep to get all logs (#21574)`` `8e81615edb `_ 2022-02-11 ``[doc] Improve s3 operator example by adding task upload_keys (#21422)`` `2c5f636e5c `_ 2022-02-11 ``Use temporary file in GCSToS3Operator (#21295)`` +`9d1c660f21 `_ 2022-02-11 ``[doc] Fix copy path in S3 to redshift (#21416)`` `598e836043 `_ 2022-02-09 ``Added SNS example DAG and rst (#21475)`` `0a3ff43d41 `_ 2022-02-08 ``Add pre-commit check for docstring param types (#21398)`` ================================================================================================= =========== ===================================================================================================================================== @@ -1666,6 +1728,7 @@ Commit `af28b41903 `_ 2021-12-05 ``Add sensor for AWS Batch (#19850) (#19885)`` `480c333c45 `_ 2021-12-03 ``Add state details to EMR container failure reason (#19579)`` `2539cb44b4 `_ 2021-12-01 ``Move to watchtower 2.0.1 (#19907)`` +`43de625d42 `_ 2021-12-01 ``Correctly capitalize names and abbreviations in docs (#19908)`` ================================================================================================= =========== ========================================================================================== 2.5.0 @@ -1708,6 +1771,7 @@ Commit `f5ad26dcdd `_ 2021-10-21 ``Fixup string concatenations (#19099)`` `86a2a19ad2 `_ 2021-10-17 ``More f-strings (#18855)`` `258451cfba `_ 2021-10-15 ``MySQLToS3Operator add support for parquet format (#18755)`` +`1571f80546 `_ 2021-10-14 ``Add pre-commit hook for common misspelling check in files (#18964)`` `176165de3b `_ 2021-10-11 ``Update S3PrefixSensor to support checking multiple prefixes within a bucket (#18807)`` `1d7cfdbcd9 `_ 2021-10-10 ``Remove extra postgres dependency from AWS Provider (#18844)`` `1df9a512c2 `_ 2021-10-08 ``Add RedshiftSQLHook, RedshiftSQLOperator (#18447)`` @@ -1736,6 +1800,7 @@ Commit `12763f125a `_ 2021-10-05 ``Add additional dependency for postgres extra for amazon provider (#18737)`` `86bf2a29ba `_ 2021-10-04 ``Simplify strings previously split across lines (#18679)`` `12133861ec `_ 2021-10-01 ``Support all Unix wildcards in S3KeySensor (#18211)`` +`af4a5e006e `_ 2021-10-01 ``Remove cargo-culted local in-page ToCs (#18668)`` `840ea3efb9 `_ 2021-09-30 ``Update documentation for September providers release (#18613)`` `8a1437e55e `_ 2021-09-29 ``Refresh credentials for long-running pods on EKS (#17951)`` `2fbd23878f `_ 2021-09-28 ``Update s3_list.py (#18561)`` @@ -1743,15 +1808,19 @@ Commit `e25eea052f `_ 2021-09-19 ``Inclusive Language (#18349)`` `4cd190c9bc `_ 2021-09-18 ``ECSOperator realtime logging (#17626)`` `27088c4533 `_ 2021-09-18 ``Add IAM Role Credentials to S3ToRedshiftTransfer and RedshiftToS3Transfer (#18156)`` +`4308a8c364 `_ 2021-09-17 ``Refactor installation pages (#18282)`` `2d4f3cb644 `_ 2021-09-15 ``Adding missing 'replace' param in docstring (#18241)`` `81ebd78db4 `_ 2021-09-12 ``Added upsert method on S3ToRedshift operator (#18027)`` +`1cb456cba1 `_ 2021-09-12 ``Add official download page for providers (#18187)`` `02397761af `_ 2021-09-12 ``Deprecate default pod name in EKSPodOperator 
(#18036)`` `e6cb2f7beb `_ 2021-09-10 ``ECSOperator returns last logs when ECS task fails (#17209)`` `b8795501ea `_ 2021-09-10 ``Aws secrets manager backend (#17448)`` `6e101317a2 `_ 2021-09-10 ``Simplify s3 ''unify_bucket_name_and_key'' (#17325)`` +`046f02e5a7 `_ 2021-09-09 ``fix misspelling (#18121)`` `3fe948a860 `_ 2021-09-08 ``sftp_to_s3 stream file option (#17609)`` `867e9305f0 `_ 2021-09-03 ``AwsBaseHook make 'client_type' & 'resource_type' optional params for 'get_client_type' & 'get_resource_type' (#17987)`` `bcd81f23af `_ 2021-09-03 ``Add Spark to the EMR cluster for the job flow examples (#17563)`` +`48316b9d17 `_ 2021-09-03 ``Make EMR cluster visible to all users note (#17557)`` `7c4d3173f2 `_ 2021-09-02 ``Delete unnecessary parameters in EKSPodOperator (#17960)`` ================================================================================================= =========== ========================================================================================================================= @@ -1768,6 +1837,7 @@ Commit `42e13e1a5a `_ 2021-08-30 ``Remove all deprecation warnings in providers (#17900)`` `890bd4310e `_ 2021-08-28 ``Add an Amazon EMR on EKS provider package (#16766)`` `41632e03b8 `_ 2021-08-27 ``Fix provider.yaml errors due to exit(0) in test (#17858)`` +`bcc7665684 `_ 2021-08-23 ``Improve discoverability of Provider packages' functionality`` `be75dcd39c `_ 2021-08-23 ``Update description about the new ''connection-types'' provider meta-data`` `76ed2a49c6 `_ 2021-08-19 ``Import Hooks lazily individually in providers manager (#17682)`` `bee48f31f1 `_ 2021-08-19 ``Implemented Basic EKS Integration (#16571)`` @@ -1777,8 +1847,10 @@ Commit `e7eeaa6086 `_ 2021-08-12 ``Doc: Fix docstrings for ''MongoToS3Operator'' (#17588)`` `987575787d `_ 2021-08-12 ``Add Mongo projections to hook and transfer (#17379)`` `77c4325fb0 `_ 2021-08-10 ``Fixing ParamValidationError when executing load_file in Glue hooks/operators (#16012)`` +`4675539501 `_ 2021-08-04 ``Update emr.rst (#17420)`` `d28efbfb77 `_ 2021-08-02 ``Improve AWS SQS Sensor (#16880) (#16904)`` `71088986f1 `_ 2021-08-02 ``make platform version as independent parameter of ECSOperator (#17281)`` +`c52e4f3517 `_ 2021-08-02 ``Provide information about IRSA on EKS (#17283)`` `80fc80ace6 `_ 2021-08-02 ``Fixes #16972 - Slugify role session name in AWS base hook (#17210)`` ================================================================================================= =========== ========================================================================================= @@ -1803,6 +1875,7 @@ Commit `b916b75079 `_ 2021-07-15 ``Prepare documentation for July release of providers. 
(#17015)`` `fc0250f1d5 `_ 2021-07-15 ``Allow attaching to previously launched task in ECSOperator (#16685)`` `d3f300fba8 `_ 2021-07-11 ``Fix wrong template_fields_renderers for AWS operators (#16820)`` +`01fb7775dd `_ 2021-07-09 ``Fixing typos in GlacierToGCSOperator documentation (#16899)`` `f0df184e4d `_ 2021-07-07 ``Update AWS Base hook to use refreshable credentials (#16770) (#16771)`` `ffe8fab653 `_ 2021-07-03 ``Added select_query to the templated fields in RedshiftToS3Operator (#16767)`` `866a601b76 `_ 2021-06-28 ``Removes pylint from our toolchain (#16682)`` @@ -1823,6 +1896,7 @@ Commit `19ed074e9c `_ 2021-06-21 ``Use safe get with AWS DMS describe replication tasks (#16540)`` `bbc627a3da `_ 2021-06-18 ``Prepares documentation for rc2 release of Providers (#16501)`` `db10c6841b `_ 2021-06-18 ``Add AWS DMS replication task operators (#15850)`` +`cf404b432d `_ 2021-06-18 ``Update Watchtower version to 1.0.6 (#16469)`` `1c82b4d015 `_ 2021-06-17 ``Fix S3ToFTPOperator (#13796)`` `36dc6a8100 `_ 2021-06-16 ``Make job name check optional in SageMakerTrainingOperator (#16327)`` `cbf8001d76 `_ 2021-06-16 ``Synchronizes updated changelog after buggfix release (#16464)`` @@ -1832,6 +1906,7 @@ Commit `30708b5b25 `_ 2021-06-13 ``Add support of capacity provider strategy for ECSOperator (#15848)`` `9c94b72d44 `_ 2021-06-07 ``Updated documentation for June 2021 provider release (#16294)`` `9fcdf3d4de `_ 2021-06-02 ``Fix S3 Select payload join (#16189)`` +`1e647029e4 `_ 2021-06-01 ``Rename the main branch of the Airflow repo to be 'main' (#16149)`` `8d16638285 `_ 2021-05-29 ``remove retry for now (#16150)`` `5fbc86f036 `_ 2021-05-27 ``Remove the 'not-allow-trailing-slash' rule on S3_hook (#15609)`` `904709d34f `_ 2021-05-27 ``Check synctatic correctness for code-snippets (#16005)`` @@ -1869,16 +1944,18 @@ Commit 1.3.0 ..... 
-Latest change: 2021-04-06 +Latest change: 2021-04-07 ================================================================================================= =========== ============================================================================= Commit Committed Subject ================================================================================================= =========== ============================================================================= +`4e018a870c `_ 2021-04-07 ``Refreshed provider's upcoming release with k8s retries (#15239)`` `042be2e4e0 `_ 2021-04-06 ``Updated documentation for provider packages before April release (#15236)`` `266384a63f `_ 2021-04-05 ``Fix string concatenation using 'f-strings' (#15200)`` `eda538f56c `_ 2021-04-02 ``AWS: Do not log info when SSM & SecretsManager secret not found (#15120)`` `9b76b94c94 `_ 2021-04-02 ``A bunch of template_fields_renderers additions (#15130)`` `6822665102 `_ 2021-04-01 ``Send region_name into parant class of AwsGlueJobHook (#14251)`` +`a607002657 `_ 2021-04-01 ``Little clarification in Aws connection docs (#14290)`` `5379698892 `_ 2021-03-27 ``Improve docstrings for various modules (#15047)`` `614be87b23 `_ 2021-03-26 ``Added retry to ECS Operator (#14263)`` `a7ca1b3b0b `_ 2021-03-26 ``Fix Sphinx Issues with Docstrings (#14968)`` @@ -1891,6 +1968,7 @@ Commit `e7bb17aeb8 `_ 2021-03-06 ``Use built-in 'cached_property' on Python 3.8 where possible (#14606)`` `697abf399d `_ 2021-03-05 ``S3DataSource is not required (#14220)`` `8ced652ecf `_ 2021-02-28 ``Implemented S3 Bucket Tagging (#14402)`` +`1c694318ac `_ 2021-02-28 ``chore: fix case of GitHub (#14525)`` ================================================================================================= =========== ============================================================================= 1.2.0 @@ -1902,6 +1980,7 @@ Latest change: 2021-02-27 Commit Committed Subject ================================================================================================= =========== ========================================================================== `589d6dec92 `_ 2021-02-27 ``Prepare to release the next wave of providers: (#14487)`` +`50a1504c52 `_ 2021-02-27 ``Fix spellings (#14483)`` `13854c32a3 `_ 2021-02-27 ``Adding support to put extra arguments for Glue Job. (#14027)`` `0d6cae4172 `_ 2021-02-24 ``Avoid using threads in S3 remote logging uplod (#14414)`` `ca35bd7f7f `_ 2021-02-21 ``By default PIP will install all packages in .local folder (#14125)`` @@ -1927,6 +2006,7 @@ Commit `d0ab7f6d3a `_ 2021-01-25 ``Add ExasolToS3Operator (#13847)`` `6d55f329f9 `_ 2021-01-25 ``AWS Glue Crawler Integration (#13072)`` `f473ca7130 `_ 2021-01-24 ``Replace 'google_cloud_storage_conn_id' by 'gcp_conn_id' when using 'GCSHook' (#13851)`` +`8ac6deaa39 `_ 2021-01-23 ``Fix PyPI spelling (#13864)`` `a9ac2b040b `_ 2021-01-23 ``Switch to f-strings using flynt. 
(#13732)`` `3fd5ef3555 `_ 2021-01-21 ``Add missing logos for integrations (#13717)`` `29730d7200 `_ 2021-01-20 ``Add acl_policy to S3CopyObjectOperator (#13773)`` @@ -1937,11 +2017,14 @@ Commit `308f1d0666 `_ 2021-01-07 ``[AIRFLOW-3723] Add Gzip capability to mongo_to_S3 operator (#13187)`` `f69405fb0b `_ 2021-01-07 ``Fix S3KeysUnchangedSensor so that template_fields work (#13490)`` `4e479e1e1b `_ 2021-01-06 ``Add S3KeySizeSensor (#13049)`` +`ac730c3ddd `_ 2021-01-02 ``Improve style of code block on aws-ssm-parameter-store.rst (#13428)`` `f7a1334abe `_ 2021-01-02 ``Add 'mongo_collection' to template_fields in MongoToS3Operator (#13361)`` +`d202fd47fa `_ 2020-12-31 ``Adding documentation explaining "strange" URI required when using AWS… (#13355)`` `bd74eb0ca0 `_ 2020-12-31 ``Allow Tags on AWS Batch Job Submission (#13396)`` `295d66f914 `_ 2020-12-30 ``Fix Grammar in PIP warning (#13380)`` `625576a3af `_ 2020-12-18 ``Fix spelling (#13135)`` `6cf76d7ac0 `_ 2020-12-18 ``Fix typo in pip upgrade command :( (#13148)`` +`f6448b4e48 `_ 2020-12-15 ``Add link to PyPI Repository to provider docs (#13064)`` `5090fb0c89 `_ 2020-12-15 ``Add script to generate integrations.json (#13073)`` ================================================================================================= =========== ========================================================================================= @@ -1957,12 +2040,15 @@ Commit `d5589673a9 `_ 2020-12-08 ``Move dummy_operator.py to dummy.py (#11178) (#11293)`` `b40dffa085 `_ 2020-12-08 ``Rename remaing modules to match AIP-21 (#12917)`` `9b39f24780 `_ 2020-12-08 ``Add support for dynamic connection form fields per provider (#12558)`` +`23e743ec09 `_ 2020-12-07 ``Move task handlers guides to provider docs (#12826)`` +`70b1cc2cfc `_ 2020-12-04 ``Move secret backends guides to provider docs (#12798)`` `bd90136aaf `_ 2020-11-30 ``Move operator guides to provider documentation packages (#12681)`` `02d94349be `_ 2020-11-29 ``Don't use time.time() or timezone.utcnow() for duration calculations (#12353)`` `de3b1e687b `_ 2020-11-28 ``Move connection guides to provider documentation packages (#12653)`` `663259d4b5 `_ 2020-11-25 ``Fix AWS DataSync tests failing (#11020)`` `3fa51f94d7 `_ 2020-11-24 ``Add check for duplicates in provider.yaml files (#12578)`` `ed09915a02 `_ 2020-11-23 ``[AIRFLOW-5115] Bugfix for S3KeySensor failing to accept template_fields (#12389)`` +`f2569de7d1 `_ 2020-11-22 ``Add example DAGs to provider docs (#12528)`` `370e7d07d1 `_ 2020-11-21 ``Fix Python Docstring parameters (#12513)`` `c34ef853c8 `_ 2020-11-20 ``Separate out documentation building per provider (#12444)`` `0080354502 `_ 2020-11-18 ``Update provider READMEs for 1.0.0b2 batch release (#12449)`` diff --git a/docs/apache-airflow-providers-fab/commits.rst b/docs/apache-airflow-providers-fab/commits.rst index 7c60cf86a16d..9f100b594e04 100644 --- a/docs/apache-airflow-providers-fab/commits.rst +++ b/docs/apache-airflow-providers-fab/commits.rst @@ -38,22 +38,27 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-31 ``Add logging to the migration commands (#43516)`` +`5886016243 `_ 2024-10-31 ``Prepare docs for Oct 2nd wave of providers RC2 (#43540)`` +`db4afd793a `_ 2024-10-31 ``DOC fix documentation error in 'apache-airflow-providers-fab/access-control.rst' (#43495)`` `d8c7d28411 `_ 2024-10-30 ``Start porting DAG definition code to the Task SDK (#43076)`` `baf2b3cb44 `_ 2024-10-29 ``fix(providers/fab): alias is_authorized_dataset to is_authorized_asset 
(#43469)`` `78ff0a9970 `_ 2024-10-27 ``Prepare docs for Oct 2nd wave of providers (#43409)`` `1f4b306c80 `_ 2024-10-25 ``Fix revoke Dag stale permission on airflow < 2.10 (#42844)`` `d7f50baa6f `_ 2024-10-23 ``Bump Flask-AppBuilder to ''4.5.2'' (#43309)`` `84ff10bf06 `_ 2024-10-23 ``Upgrade FAB to 4.5.1 (#43251)`` +`d186d3fb50 `_ 2024-10-23 ``Rename dataset as asset in UI (#43073)`` `7324cdaa91 `_ 2024-10-17 ``feat(providers/fab): Use asset in common provider (#43112)`` +`520c443656 `_ 2024-10-15 ``fix: Change CustomSecurityManager method name (#43034)`` `c7104f53b9 `_ 2024-10-10 ``Move user and roles schemas to fab provider (#42869)`` `978bb0c6b6 `_ 2024-10-10 ``Move the session auth backend to FAB auth manager (#42878)`` `857ca4c06c `_ 2024-10-09 ``Split providers out of the main "airflow/" tree into a UV workspace project (#42505)`` -================================================================================================= =========== ======================================================================================== +================================================================================================= =========== ============================================================================================= 1.4.1 ..... @@ -64,6 +69,7 @@ Latest change: 2024-10-09 Commit Committed Subject ================================================================================================= =========== ================================================================================================================================ `2bb8628463 `_ 2024-10-09 ``Prepare docs for Oct 1st adhoc wave of providers (#42862)`` +`ef981f18ce `_ 2024-10-04 ``Rename dataset endpoints as asset endpoints (#42579)`` `9536c98a43 `_ 2024-10-01 ``Update Rest API tests to no longer rely on FAB auth manager. 
Move tests specific to FAB permissions to FAB provider (#42523)`` `ede7cb27fd `_ 2024-09-30 ``Rename dataset related python variable names to asset (#41348)`` `2beb6a765d `_ 2024-09-25 ``Simplify expression for get_permitted_dag_ids query (#42484)`` @@ -74,12 +80,14 @@ Commit Latest change: 2024-09-21 -================================================================================================= =========== =================================================================================== +================================================================================================= =========== ==================================================================================== Commit Committed Subject -================================================================================================= =========== =================================================================================== +================================================================================================= =========== ==================================================================================== `7628d47d04 `_ 2024-09-21 ``Prepare docs for Sep 1st wave of providers (#42387)`` `6a527c9fac `_ 2024-09-21 ``Fix pre-commit for auto update of fab migration versions (#42382)`` `8741e9c176 `_ 2024-09-20 ``Handle 'AUTH_ROLE_PUBLIC' in FAB auth manager (#42280)`` +`ee87fa0cba `_ 2024-09-20 ``Minor fixups to FAB DB command docs (#42377)`` +`58b3771bf0 `_ 2024-09-20 ``Add documentation for FAB DB commands (#42352)`` `9f167bbc34 `_ 2024-09-19 ``Add FAB migration commands (#41804)`` `db7f92787a `_ 2024-09-17 ``Deprecated kerberos auth removed (#41693)`` `d1e500c450 `_ 2024-09-16 ``Deprecated configuration removed (#42129)`` @@ -87,11 +95,14 @@ Commit `7b6eb92537 `_ 2024-09-04 ``Move 'register_views' to auth manager interface (#41777)`` `1379376b66 `_ 2024-09-02 ``Add TODOs in providers code for Subdag code removal (#41963)`` `f16107017c `_ 2024-09-02 ``Revert "Provider fab auth manager deprecated methods removed (#41720)" (#41960)`` +`433bfd4a5f `_ 2024-08-27 ``chore(docs): add an example for auth with keycloak (#41687)`` `b0391838c1 `_ 2024-08-26 ``Provider fab auth manager deprecated methods removed (#41720)`` `59dc98178b `_ 2024-08-25 ``Separate FAB migration from Core Airflow migration (#41437)`` +`67a7923408 `_ 2024-08-25 ``deprecated fab auth manager removed (#41708)`` +`c2a9833ba7 `_ 2024-08-22 ``feat: deprecated basic auth airflow.api.auth.backend.basic_auth removed (#41663)`` `c78a004210 `_ 2024-08-20 ``Add fixes by breeze/precommit-lint static checks (#41604) (#41618)`` `d6df0786cf `_ 2024-08-20 ``Make kerberos an optional and devel dependency for impala and fab (#41616)`` -================================================================================================= =========== =================================================================================== +================================================================================================= =========== ==================================================================================== 1.3.0 ..... 
@@ -102,6 +113,7 @@ Latest change: 2024-08-19 Commit Committed Subject ================================================================================================= =========== ========================================================================== `75fb7acbac `_ 2024-08-19 ``Prepare docs for Aug 2nd wave of providers (#41559)`` +`8807f73dfd `_ 2024-08-16 ``Delete experimental API (#41434)`` `6570c6d1bb `_ 2024-08-13 ``Remove deprecated SubDags (#41390)`` `090607d92a `_ 2024-08-08 ``Feature: Allow set Dag Run resource into Dag Level permission (#40703)`` ================================================================================================= =========== ========================================================================== @@ -154,16 +166,18 @@ Commit Latest change: 2024-05-26 -================================================================================================= =========== =============================================================== +================================================================================================= =========== ====================================================================== Commit Committed Subject -================================================================================================= =========== =============================================================== +================================================================================================= =========== ====================================================================== `34500f3a2f `_ 2024-05-26 ``Prepare docs 3rd wave May 2024 (#39738)`` `2b1a2f8d56 `_ 2024-05-11 ``Reapply templates for all providers (#39554)`` `2c05187b07 `_ 2024-05-10 ``Faster 'airflow_version' imports (#39552)`` `73918925ed `_ 2024-05-08 ``Simplify 'airflow_version' imports (#39497)`` +`4a1d040973 `_ 2024-05-07 ``Minor fixup for custom FAB permission consistency warning (#39469)`` +`cbebb4837f `_ 2024-05-07 ``Add description about custom FAB permission consistency (#39459)`` `959e52bf3c `_ 2024-05-02 ``Simplify action name retrieval in FAB auth manager (#39358)`` `4910c0439b `_ 2024-05-01 ``Add 'jmespath' as an explicit dependency (#39350)`` -================================================================================================= =========== =============================================================== +================================================================================================= =========== ====================================================================== 1.1.0 ..... 
@@ -238,11 +252,12 @@ Commit Latest change: 2024-02-14 -================================================================================================= =========== ========================================================================================= +================================================================================================= =========== ==================================================================================================================== Commit Committed Subject -================================================================================================= =========== ========================================================================================= +================================================================================================= =========== ==================================================================================================================== `54a400fcb6 `_ 2024-02-14 ``Mark FAB provider as ready (#37362)`` `ec97a07197 `_ 2024-02-13 ``standardize get_app appbuilder usage (#37397)`` +`f61ffe58d3 `_ 2024-02-11 ``Remove extra package headers in provider indexes (#37324)`` `28f94f8891 `_ 2024-02-10 ``Move 'IMPORT_ERROR' from DAG related permissions to view related permissions (#37292)`` `00ed46769e `_ 2024-02-09 ``D401 support in fab provider (#37283)`` `e99cfbbd51 `_ 2024-02-07 ``Upgrade to FAB 4.3.11 (#37233)`` @@ -251,6 +266,7 @@ Commit `0fce3b6047 `_ 2024-01-28 ``Add "airflow users reset-password" command (#37044)`` `ce246c0ed8 `_ 2024-01-26 ``fix for role and permission count in export (#36589)`` `18d2498e44 `_ 2024-01-11 ``Generate doc for fab CLI commands (#36672)`` +`485ddbf968 `_ 2024-01-11 ``Add deprecation information in deprecated endpoints + geenrate Rest API documentation from fab provider (#36664)`` `c439ab87c4 `_ 2024-01-10 ``Standardize airflow build process and switch to Hatchling build backend (#36537)`` `28cad70223 `_ 2024-01-03 ``Move config related to FAB auth manager to FAB provider (#36232)`` `2093b6f3b9 `_ 2024-01-03 ``Fix security manager inheritance in fab provider (#36538)`` @@ -259,7 +275,7 @@ Commit `6937ae7647 `_ 2023-12-30 ``Speed up autocompletion of Breeze by simplifying provider state (#36499)`` `83bdc297ce `_ 2023-12-30 ``added cli command to list auth managers under 'airflow providers' (#36445)`` `341d5b747d `_ 2023-12-23 ``Add feture of "not-ready" provider. (#36391)`` -================================================================================================= =========== ========================================================================================= +================================================================================================= =========== ==================================================================================================================== 1.0.0 ..... 
@@ -270,7 +286,11 @@ Latest change: 2023-12-23 Commit Committed Subject ================================================================================================= =========== ================================================================================== `b15d5578da `_ 2023-12-23 ``Re-apply updated version numbers to 2nd wave of providers in December (#36380)`` +`fd86fae5d5 `_ 2023-12-22 ``Document the missing website permission (#36329)`` +`1e6fa73575 `_ 2023-12-22 ``fix wrong sample code for "dag level permissions" (#36350)`` `2be7149598 `_ 2023-12-21 ``fix inheritance checking of security manager in FabAuthManager (#36343)`` +`b35b08ec41 `_ 2023-12-20 ``Improve pre-commit to generate Airflow diagrams as a code (#36333)`` +`475818542e `_ 2023-12-19 ``Create auth manager documentation (#36211)`` `e9ba37bb58 `_ 2023-12-17 ``Add code snippet formatting in docstrings via Ruff (#36262)`` `f7f7183617 `_ 2023-12-17 ``Update permission docs (#36120)`` `4d96a9a3a1 `_ 2023-12-13 ``Remove dependency of 'Connexion' from auth manager interface (#36209)`` diff --git a/providers/src/airflow/providers/amazon/CHANGELOG.rst b/providers/src/airflow/providers/amazon/CHANGELOG.rst index 0ab9ee54109c..8b684ab11c3b 100644 --- a/providers/src/airflow/providers/amazon/CHANGELOG.rst +++ b/providers/src/airflow/providers/amazon/CHANGELOG.rst @@ -49,6 +49,7 @@ Misc * ``feat(providers/amazon): Use asset in common provider (#43110)`` * ``Restrict looker-sdk version 24.18.0 and microsoft-kiota-http 1.3.4 (#42954)`` * ``Limit mypy-boto3-appflow (#43436)`` +* ``Move PythonOperator to Standard provider (#42081)`` .. Below changes are excluded from the changelog. Move them to appropriate section above if needed. Do not delete the lines(!): @@ -56,6 +57,7 @@ Misc * ``Split providers out of the main "airflow/" tree into a UV workspace project (#42505)`` * ``Start porting DAG definition code to the Task SDK (#43076)`` * ``Prepare docs for Oct 2nd wave of providers (#43409)`` + * ``Prepare docs for Oct 2nd wave of providers RC2 (#43540)`` 9.0.0 ..... diff --git a/providers/src/airflow/providers/fab/CHANGELOG.rst b/providers/src/airflow/providers/fab/CHANGELOG.rst index 2dc9b9b08d0b..fd62c0c964ce 100644 --- a/providers/src/airflow/providers/fab/CHANGELOG.rst +++ b/providers/src/airflow/providers/fab/CHANGELOG.rst @@ -31,8 +31,9 @@ Features Bug Fixes ~~~~~~~~~ -* ``Fix revoke Dag stale permission on airflow < 2.10 (#42844)`` +* ``fix revoke Dag stale permission on airflow < 2.10 (#42844)`` * ``fix(providers/fab): alias is_authorized_dataset to is_authorized_asset (#43469)`` +* ``fix: Change CustomSecurityManager method name (#43034)`` Misc ~~~~ @@ -41,13 +42,16 @@ Misc * ``Upgrade Flask-AppBuilder to 4.5.1 (#43251)`` * ``Move user and roles schemas to fab provider (#42869)`` * ``Move the session auth backend to FAB auth manager (#42878)`` - +* ``Add logging to the migration commands (#43516)`` +* ``DOC fix documentation error in 'apache-airflow-providers-fab/access-control.rst' (#43495)`` +* ``Rename dataset as asset in UI (#43073)`` .. Below changes are excluded from the changelog. Move them to appropriate section above if needed. Do not delete the lines(!): * ``Split providers out of the main "airflow/" tree into a UV workspace project (#42505)`` * ``Start porting DAG definition code to the Task SDK (#43076)`` * ``Prepare docs for Oct 2nd wave of providers (#43409)`` + * ``Prepare docs for Oct 2nd wave of providers RC2 (#43540)`` 1.4.1 ..... 
From 0664782c19dc40a311170c19b831dbd3bf1052a0 Mon Sep 17 00:00:00 2001 From: Elad Kalif <45845474+eladkal@users.noreply.github.com> Date: Sun, 3 Nov 2024 12:59:50 +0200 Subject: [PATCH 014/137] Fix docstring for AthenaTrigger (#43616) --- providers/src/airflow/providers/amazon/aws/triggers/athena.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/providers/src/airflow/providers/amazon/aws/triggers/athena.py b/providers/src/airflow/providers/amazon/aws/triggers/athena.py index a6ca58a2a250..6468431127cb 100644 --- a/providers/src/airflow/providers/amazon/aws/triggers/athena.py +++ b/providers/src/airflow/providers/amazon/aws/triggers/athena.py @@ -29,8 +29,7 @@ class AthenaTrigger(AwsBaseWaiterTrigger): """ Trigger for AthenaOperator. - The trigger will asynchronously poll the boto3 API and wait for the - Redshift cluster to be in the `available` state. + The trigger will asynchronously poll the boto3 API :param query_execution_id: ID of the Athena query execution to watch :param waiter_delay: The amount of time in seconds to wait between attempts. From ff6038b8d154ccdde65a4ebd652ad41d88e8c33e Mon Sep 17 00:00:00 2001 From: JKrehling <25485833+JKrehling@users.noreply.github.com> Date: Sun, 3 Nov 2024 09:10:13 -0500 Subject: [PATCH 015/137] updated git-sync to 4.3.0 (#41411) Co-authored-by: Jon Krehling --- chart/newsfragments/41411.significant.rst | 3 +++ chart/values.schema.json | 2 +- chart/values.yaml | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 chart/newsfragments/41411.significant.rst diff --git a/chart/newsfragments/41411.significant.rst b/chart/newsfragments/41411.significant.rst new file mode 100644 index 000000000000..25c1342f573c --- /dev/null +++ b/chart/newsfragments/41411.significant.rst @@ -0,0 +1,3 @@ +Default git-sync image is updated to ``4.3.0`` + +The default git-sync image that is used with the Chart is now ``4.3.0``, previously it was ``4.1.0``. diff --git a/chart/values.schema.json b/chart/values.schema.json index 9d0887bbfcae..ea673a40c2d6 100644 --- a/chart/values.schema.json +++ b/chart/values.schema.json @@ -949,7 +949,7 @@ "tag": { "description": "The gitSync image tag.", "type": "string", - "default": "v4.1.0" + "default": "v4.3.0" }, "pullPolicy": { "description": "The gitSync image pull policy.", diff --git a/chart/values.yaml b/chart/values.yaml index 18a1e91a7b6c..6c3b2aeb9bd1 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -123,7 +123,7 @@ images: pullPolicy: IfNotPresent gitSync: repository: registry.k8s.io/git-sync/git-sync - tag: v4.1.0 + tag: v4.3.0 pullPolicy: IfNotPresent # Select certain nodes for airflow pods. From 12950dd03b8ffea7bfe206dea40a74dab8694593 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Mon, 4 Nov 2024 13:01:38 +0100 Subject: [PATCH 016/137] Enable back iterative development of latest providers with old airflows (#43617) * Enable back iterative development of latest providers with old airflows The compatibility tests in CI are using providers built as packages from sources, so the compatibility tests run there using "providers/tests" work just fine, because all providers are installed in the airflow.providers site library. However when we are iterating and debugging backwards compatiblity provider tests, we should be able to use local provider sources, rather than installed packages and we have the possibility of mounting both - providers sources and tests to the image. 
See `contributing-docs/testing/unit_tests.rst` on how to do it by using the
``--mount-sources providers-and-tests`` flag combined with ``--use-airflow-version``.

However, as of #42505 this has been broken, because currently in main we rely
on airflow having "pkgutil" namespace packages for both - the airflow and the
airflow.providers packages (previous airflow versions had an implicit package
for the airflow.providers package) - so providers installed locally cannot be
used as "another" source of providers. Previously it was working because both
the "installed" and the "sources" `airflow.providers` packages were implicit
namespace packages.

As explained in
https://packaging.python.org/en/latest/guides/packaging-namespace-packages/#native-namespace-packages

> Every distribution that uses the namespace package must include such
> an `__init__.py`. If any distribution does not, it will cause the
> namespace logic to fail and the other sub-packages will not be
> importable. Any additional code in __init__.py will be inaccessible.

So because old airflow uses an implicit providers package and main airflow from
source uses an "explicit" providers package, the only way we can make the
"source" providers work is to mount them or symbolically link them to inside
the installed distribution of the airflow package (in the site directory), or
dynamically remove the __init__.py from the provider's source directory.

We cannot mount the provider package sources inside the installed airflow -
because when --use-airflow-version is used, airflow is installed dynamically
inside the container - after the container is started.

This PR solves the problem by adding an env variable that makes the
initialization script remove the installed airflow.providers folder after
installing airflow and link the "providers/src/airflow/providers" folder there.

This has the added benefit that all providers (including the preinstalled ones)
are used from "main" sources rather than from installed packages, which was
problematic with the past way of using providers from sources, as it relied on
the fact that both "airflow.providers" in the site-library and the one in
sources were implicit namespace packages.
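
For illustration, iterating on the latest provider sources against an older
Airflow release then boils down to something like the following (the Airflow
version shown is only an example, not something prescribed by this change):

    # Start a Breeze shell with an older Airflow installed at container start,
    # while the latest provider sources and tests are mounted from the local
    # checkout and linked into the installed airflow package.
    breeze shell --use-airflow-version 2.10.2 --mount-sources providers-and-tests
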
* Update Dockerfile.ci Co-authored-by: GPK * Update scripts/docker/entrypoint_ci.sh Co-authored-by: GPK --------- Co-authored-by: GPK --- Dockerfile.ci | 7 +++++++ scripts/ci/docker-compose/providers-and-tests-sources.yml | 5 +++-- scripts/docker/entrypoint_ci.sh | 7 +++++++ 3 files changed, 17 insertions(+), 2 deletions(-) diff --git a/Dockerfile.ci b/Dockerfile.ci index 666344e28525..6ddf2f4e1ac4 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -1017,6 +1017,13 @@ function determine_airflow_to_use() { --constraint https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt # Some packages might leave legacy typing module which causes test issues pip uninstall -y typing || true + if [[ ${LINK_PROVIDERS_TO_AIRFLOW_PACKAGE=} == "true" ]]; then + echo + echo "${COLOR_BLUE}Linking providers to airflow package as we are using them from mounted sources.${COLOR_RESET}" + echo + rm -rf /usr/local/lib/python${PYTHON_MAJOR_MINOR_VERSION}/site-packages/airflow/providers + ln -s "${AIRFLOW_SOURCES}/providers/src/airflow/providers" "/usr/local/lib/python${PYTHON_MAJOR_MINOR_VERSION}/site-packages/airflow/providers" + fi fi if [[ "${USE_AIRFLOW_VERSION}" =~ ^2\.2\..*|^2\.1\..*|^2\.0\..* && "${AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=}" != "" ]]; then diff --git a/scripts/ci/docker-compose/providers-and-tests-sources.yml b/scripts/ci/docker-compose/providers-and-tests-sources.yml index 29498ae27eb9..8a06f2fcc0d1 100644 --- a/scripts/ci/docker-compose/providers-and-tests-sources.yml +++ b/scripts/ci/docker-compose/providers-and-tests-sources.yml @@ -21,6 +21,7 @@ services: tty: true # docker run -t environment: - AIRFLOW__CORE__PLUGINS_FOLDER=/files/plugins + - LINK_PROVIDERS_TO_AIRFLOW_PACKAGE=true # We only mount tests folder volumes: - ../../../.bash_aliases:/root/.bash_aliases:cached @@ -30,8 +31,8 @@ services: - ../../../empty:/opt/airflow/airflow # but keep tests - ../../../tests/:/opt/airflow/tests:cached - # and providers - - ../../../providers/src/airflow/providers:/opt/airflow/airflow/providers:cached + # Mount providers to make sure that we have the latest providers - both tests and sources + - ../../../providers/:/opt/airflow/providers:cached # and entrypoint and in_container scripts for testing - ../../../scripts/docker/entrypoint_ci.sh:/entrypoint - ../../../scripts/in_container/:/opt/airflow/scripts/in_container diff --git a/scripts/docker/entrypoint_ci.sh b/scripts/docker/entrypoint_ci.sh index cbd7bdce141e..8e864ba83121 100755 --- a/scripts/docker/entrypoint_ci.sh +++ b/scripts/docker/entrypoint_ci.sh @@ -236,6 +236,13 @@ function determine_airflow_to_use() { --constraint https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt # Some packages might leave legacy typing module which causes test issues pip uninstall -y typing || true + if [[ ${LINK_PROVIDERS_TO_AIRFLOW_PACKAGE=} == "true" ]]; then + echo + echo "${COLOR_BLUE}Linking providers to airflow package as we are using them from mounted sources.${COLOR_RESET}" + echo + rm -rf /usr/local/lib/python${PYTHON_MAJOR_MINOR_VERSION}/site-packages/airflow/providers + ln -s "${AIRFLOW_SOURCES}/providers/src/airflow/providers" "/usr/local/lib/python${PYTHON_MAJOR_MINOR_VERSION}/site-packages/airflow/providers" + fi fi if [[ "${USE_AIRFLOW_VERSION}" =~ ^2\.2\..*|^2\.1\..*|^2\.0\..* && "${AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=}" != "" ]]; then From 20e6517ed334e5d76015bd3e170933b25f9337b5 Mon Sep 17 00:00:00 2001 From: Kalyan R Date: Mon, 4 Nov 
2024 19:55:36 +0530 Subject: [PATCH 017/137] AIP-84 Migrate DagWarning public endpoint to FastAPI (#42749) * add dag warning endpoint * refactor tests * fix test * Update tests/api_fastapi/core_api/routes/public/test_dag_warning.py * Update tests/api_fastapi/core_api/routes/public/test_dag_warning.py * Update tests/api_fastapi/core_api/routes/public/test_dag_warning.py --- .../endpoints/dag_warning_endpoint.py | 2 + airflow/api_fastapi/common/parameters.py | 32 +++++ .../core_api/openapi/v1-generated.yaml | 120 ++++++++++++++++++ .../core_api/routes/public/__init__.py | 2 + .../core_api/routes/public/dag_warning.py | 72 +++++++++++ .../core_api/serializers/dag_warning.py | 40 ++++++ airflow/ui/openapi-gen/queries/common.ts | 31 ++++- airflow/ui/openapi-gen/queries/prefetch.ts | 48 ++++++- airflow/ui/openapi-gen/queries/queries.ts | 50 ++++++++ airflow/ui/openapi-gen/queries/suspense.ts | 51 +++++++- .../ui/openapi-gen/requests/schemas.gen.ts | 55 ++++++++ .../ui/openapi-gen/requests/services.gen.ts | 37 ++++++ airflow/ui/openapi-gen/requests/types.gen.ts | 59 +++++++++ .../routes/public/test_dag_warning.py | 85 +++++++++++++ 14 files changed, 681 insertions(+), 3 deletions(-) create mode 100644 airflow/api_fastapi/core_api/routes/public/dag_warning.py create mode 100644 airflow/api_fastapi/core_api/serializers/dag_warning.py create mode 100644 tests/api_fastapi/core_api/routes/public/test_dag_warning.py diff --git a/airflow/api_connexion/endpoints/dag_warning_endpoint.py b/airflow/api_connexion/endpoints/dag_warning_endpoint.py index 8a15a30cece8..a158c3f443c8 100644 --- a/airflow/api_connexion/endpoints/dag_warning_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_warning_endpoint.py @@ -29,6 +29,7 @@ from airflow.api_connexion.security import get_readable_dags from airflow.auth.managers.models.resource_details import DagAccessEntity from airflow.models.dagwarning import DagWarning as DagWarningModel +from airflow.utils.api_migration import mark_fastapi_migration_done from airflow.utils.db import get_query_count from airflow.utils.session import NEW_SESSION, provide_session @@ -38,6 +39,7 @@ from airflow.api_connexion.types import APIResponse +@mark_fastapi_migration_done @security.requires_access_dag("GET", DagAccessEntity.WARNING) @format_parameters({"limit": check_limit}) @provide_session diff --git a/airflow/api_fastapi/common/parameters.py b/airflow/api_fastapi/common/parameters.py index 7137a0a12484..bd6501763722 100644 --- a/airflow/api_fastapi/common/parameters.py +++ b/airflow/api_fastapi/common/parameters.py @@ -31,6 +31,7 @@ from airflow.models import Base, Connection from airflow.models.dag import DagModel, DagTag from airflow.models.dagrun import DagRun +from airflow.models.dagwarning import DagWarning, DagWarningType from airflow.utils import timezone from airflow.utils.state import DagRunState @@ -292,6 +293,34 @@ def _safe_parse_datetime(date_to_check: str) -> datetime: ) +class _WarningTypeFilter(BaseParam[str]): + """Filter on warning type.""" + + def to_orm(self, select: Select) -> Select: + if self.value is None and self.skip_none: + return select + return select.where(DagWarning.warning_type == self.value) + + def depends(self, warning_type: DagWarningType | None = None) -> _WarningTypeFilter: + return self.set_value(warning_type) + + +class _DagIdFilter(BaseParam[str]): + """Filter on dag_id.""" + + def __init__(self, attribute: ColumnElement, skip_none: bool = True) -> None: + super().__init__(skip_none) + self.attribute = attribute + + def to_orm(self, select: 
Select) -> Select: + if self.value is None and self.skip_none: + return select + return select.where(self.attribute == self.value) + + def depends(self, dag_id: str | None = None) -> _DagIdFilter: + return self.set_value(dag_id) + + # Common Safe DateTime DateTimeQuery = Annotated[str, AfterValidator(_safe_parse_datetime)] # DAG @@ -310,5 +339,8 @@ def _safe_parse_datetime(date_to_check: str) -> datetime: QueryOwnersFilter = Annotated[_OwnersFilter, Depends(_OwnersFilter().depends)] # DagRun QueryLastDagRunStateFilter = Annotated[_LastDagRunStateFilter, Depends(_LastDagRunStateFilter().depends)] +# DAGWarning +QueryDagIdInDagWarningFilter = Annotated[_DagIdFilter, Depends(_DagIdFilter(DagWarning.dag_id).depends)] +QueryWarningTypeFilter = Annotated[_WarningTypeFilter, Depends(_WarningTypeFilter().depends)] # DAGTags QueryDagTagPatternSearch = Annotated[_DagTagNamePatternSearch, Depends(_DagTagNamePatternSearch().depends)] diff --git a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml index b7e7f6269371..19a4875e704d 100644 --- a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml @@ -1111,6 +1111,76 @@ paths: application/json: schema: $ref: '#/components/schemas/HealthInfoSchema' + /public/dagWarnings: + get: + tags: + - DagWarning + summary: List Dag Warnings + description: Get a list of DAG warnings. + operationId: list_dag_warnings + parameters: + - name: dag_id + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Dag Id + - name: warning_type + in: query + required: false + schema: + anyOf: + - $ref: '#/components/schemas/DagWarningType' + - type: 'null' + title: Warning Type + - name: limit + in: query + required: false + schema: + type: integer + default: 100 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: string + default: dag_id + title: Order By + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGWarningCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' /public/plugins/: get: tags: @@ -2505,6 +2575,44 @@ components: - total_entries title: DAGTagCollectionResponse description: DAG Tags Collection serializer for responses. + DAGWarningCollectionResponse: + properties: + dag_warnings: + items: + $ref: '#/components/schemas/DAGWarningResponse' + type: array + title: Dag Warnings + total_entries: + type: integer + title: Total Entries + type: object + required: + - dag_warnings + - total_entries + title: DAGWarningCollectionResponse + description: DAG warning collection serializer for responses. 
+ DAGWarningResponse: + properties: + dag_id: + type: string + title: Dag Id + warning_type: + $ref: '#/components/schemas/DagWarningType' + message: + type: string + title: Message + timestamp: + type: string + format: date-time + title: Timestamp + type: object + required: + - dag_id + - warning_type + - message + - timestamp + title: DAGWarningResponse + description: DAG Warning serializer for responses. DAGWithLatestDagRunsCollectionResponse: properties: total_entries: @@ -2744,6 +2852,18 @@ components: title: DagTagPydantic description: Serializable representation of the DagTag ORM SqlAlchemyModel used by internal API. + DagWarningType: + type: string + enum: + - asset conflict + - non-existent pool + title: DagWarningType + description: 'Enum for DAG warning types. + + + This is the set of allowable values for the ``warning_type`` field + + in the DagWarning model.' EventLogResponse: properties: event_log_id: diff --git a/airflow/api_fastapi/core_api/routes/public/__init__.py b/airflow/api_fastapi/core_api/routes/public/__init__.py index cc9dd9c5e1ba..a153952287bc 100644 --- a/airflow/api_fastapi/core_api/routes/public/__init__.py +++ b/airflow/api_fastapi/core_api/routes/public/__init__.py @@ -21,6 +21,7 @@ from airflow.api_fastapi.core_api.routes.public.connections import connections_router from airflow.api_fastapi.core_api.routes.public.dag_run import dag_run_router from airflow.api_fastapi.core_api.routes.public.dag_sources import dag_sources_router +from airflow.api_fastapi.core_api.routes.public.dag_warning import dag_warning_router from airflow.api_fastapi.core_api.routes.public.dags import dags_router from airflow.api_fastapi.core_api.routes.public.event_logs import event_logs_router from airflow.api_fastapi.core_api.routes.public.monitor import monitor_router @@ -40,6 +41,7 @@ public_router.include_router(dags_router) public_router.include_router(event_logs_router) public_router.include_router(monitor_router) +public_router.include_router(dag_warning_router) public_router.include_router(plugins_router) public_router.include_router(pools_router) public_router.include_router(providers_router) diff --git a/airflow/api_fastapi/core_api/routes/public/dag_warning.py b/airflow/api_fastapi/core_api/routes/public/dag_warning.py new file mode 100644 index 000000000000..a388fae13be1 --- /dev/null +++ b/airflow/api_fastapi/core_api/routes/public/dag_warning.py @@ -0,0 +1,72 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from __future__ import annotations + +from fastapi import Depends +from sqlalchemy import select +from sqlalchemy.orm import Session +from typing_extensions import Annotated + +from airflow.api_fastapi.common.db.common import ( + get_session, + paginated_select, +) +from airflow.api_fastapi.common.parameters import ( + QueryDagIdInDagWarningFilter, + QueryLimit, + QueryOffset, + QueryWarningTypeFilter, + SortParam, +) +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc +from airflow.api_fastapi.core_api.serializers.dag_warning import ( + DAGWarningCollectionResponse, + DAGWarningResponse, +) +from airflow.models import DagWarning + +dag_warning_router = AirflowRouter(tags=["DagWarning"]) + + +@dag_warning_router.get("/dagWarnings", responses=create_openapi_http_exception_doc([401, 403])) +async def list_dag_warnings( + dag_id: QueryDagIdInDagWarningFilter, + warning_type: QueryWarningTypeFilter, + limit: QueryLimit, + offset: QueryOffset, + order_by: Annotated[ + SortParam, + Depends(SortParam(["dag_id", "warning_type", "message", "timestamp"], DagWarning).dynamic_depends()), + ], + session: Annotated[Session, Depends(get_session)], +) -> DAGWarningCollectionResponse: + """Get a list of DAG warnings.""" + dag_warnings_select, total_entries = paginated_select( + select(DagWarning), [warning_type, dag_id], order_by, offset, limit, session + ) + + dag_warnings = session.scalars(dag_warnings_select).all() + + return DAGWarningCollectionResponse( + dag_warnings=[ + DAGWarningResponse.model_validate(dag_warning, from_attributes=True) + for dag_warning in dag_warnings + ], + total_entries=total_entries, + ) diff --git a/airflow/api_fastapi/core_api/serializers/dag_warning.py b/airflow/api_fastapi/core_api/serializers/dag_warning.py new file mode 100644 index 000000000000..f38a3a8d093f --- /dev/null +++ b/airflow/api_fastapi/core_api/serializers/dag_warning.py @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from __future__ import annotations + +from datetime import datetime + +from pydantic import BaseModel + +from airflow.models.dagwarning import DagWarningType + + +class DAGWarningResponse(BaseModel): + """DAG Warning serializer for responses.""" + + dag_id: str + warning_type: DagWarningType + message: str + timestamp: datetime + + +class DAGWarningCollectionResponse(BaseModel): + """DAG warning collection serializer for responses.""" + + dag_warnings: list[DAGWarningResponse] + total_entries: int diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts index 07edb67a9930..f715b41c3f4e 100644 --- a/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow/ui/openapi-gen/queries/common.ts @@ -7,6 +7,7 @@ import { DagRunService, DagService, DagSourceService, + DagWarningService, DagsService, DashboardService, EventLogService, @@ -18,7 +19,7 @@ import { VariableService, VersionService, } from "../requests/services.gen"; -import { DagRunState } from "../requests/types.gen"; +import { DagRunState, DagWarningType } from "../requests/types.gen"; export type AssetServiceNextRunAssetsDefaultResponse = Awaited< ReturnType @@ -327,6 +328,34 @@ export const UseMonitorServiceGetHealthKeyFn = (queryKey?: Array) => [ useMonitorServiceGetHealthKey, ...(queryKey ?? []), ]; +export type DagWarningServiceListDagWarningsDefaultResponse = Awaited< + ReturnType +>; +export type DagWarningServiceListDagWarningsQueryResult< + TData = DagWarningServiceListDagWarningsDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useDagWarningServiceListDagWarningsKey = + "DagWarningServiceListDagWarnings"; +export const UseDagWarningServiceListDagWarningsKeyFn = ( + { + dagId, + limit, + offset, + orderBy, + warningType, + }: { + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string; + warningType?: DagWarningType; + } = {}, + queryKey?: Array, +) => [ + useDagWarningServiceListDagWarningsKey, + ...(queryKey ?? [{ dagId, limit, offset, orderBy, warningType }]), +]; export type PluginServiceGetPluginsDefaultResponse = Awaited< ReturnType >; diff --git a/airflow/ui/openapi-gen/queries/prefetch.ts b/airflow/ui/openapi-gen/queries/prefetch.ts index db61369e19ff..a0f3a75eb22f 100644 --- a/airflow/ui/openapi-gen/queries/prefetch.ts +++ b/airflow/ui/openapi-gen/queries/prefetch.ts @@ -7,6 +7,7 @@ import { DagRunService, DagService, DagSourceService, + DagWarningService, DagsService, DashboardService, EventLogService, @@ -18,7 +19,7 @@ import { VariableService, VersionService, } from "../requests/services.gen"; -import { DagRunState } from "../requests/types.gen"; +import { DagRunState, DagWarningType } from "../requests/types.gen"; import * as Common from "./common"; /** @@ -408,6 +409,51 @@ export const prefetchUseMonitorServiceGetHealth = (queryClient: QueryClient) => queryKey: Common.UseMonitorServiceGetHealthKeyFn(), queryFn: () => MonitorService.getHealth(), }); +/** + * List Dag Warnings + * Get a list of DAG warnings. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.warningType + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns DAGWarningCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagWarningServiceListDagWarnings = ( + queryClient: QueryClient, + { + dagId, + limit, + offset, + orderBy, + warningType, + }: { + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string; + warningType?: DagWarningType; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn({ + dagId, + limit, + offset, + orderBy, + warningType, + }), + queryFn: () => + DagWarningService.listDagWarnings({ + dagId, + limit, + offset, + orderBy, + warningType, + }), + }); /** * Get Plugins * @param data The data for the request. diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts index 7820656799e5..8ffcee3defb2 100644 --- a/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow/ui/openapi-gen/queries/queries.ts @@ -12,6 +12,7 @@ import { DagRunService, DagService, DagSourceService, + DagWarningService, DagsService, DashboardService, EventLogService, @@ -27,6 +28,7 @@ import { DAGPatchBody, DAGRunPatchBody, DagRunState, + DagWarningType, PoolPatchBody, PoolPostBody, VariableBody, @@ -520,6 +522,54 @@ export const useMonitorServiceGetHealth = < queryFn: () => MonitorService.getHealth() as TData, ...options, }); +/** + * List Dag Warnings + * Get a list of DAG warnings. + * @param data The data for the request. + * @param data.dagId + * @param data.warningType + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns DAGWarningCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagWarningServiceListDagWarnings = < + TData = Common.DagWarningServiceListDagWarningsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + limit, + offset, + orderBy, + warningType, + }: { + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string; + warningType?: DagWarningType; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn( + { dagId, limit, offset, orderBy, warningType }, + queryKey, + ), + queryFn: () => + DagWarningService.listDagWarnings({ + dagId, + limit, + offset, + orderBy, + warningType, + }) as TData, + ...options, + }); /** * Get Plugins * @param data The data for the request. diff --git a/airflow/ui/openapi-gen/queries/suspense.ts b/airflow/ui/openapi-gen/queries/suspense.ts index 2cb0841d71f2..6ceed83349de 100644 --- a/airflow/ui/openapi-gen/queries/suspense.ts +++ b/airflow/ui/openapi-gen/queries/suspense.ts @@ -7,6 +7,7 @@ import { DagRunService, DagService, DagSourceService, + DagWarningService, DagsService, DashboardService, EventLogService, @@ -18,7 +19,7 @@ import { VariableService, VersionService, } from "../requests/services.gen"; -import { DagRunState } from "../requests/types.gen"; +import { DagRunState, DagWarningType } from "../requests/types.gen"; import * as Common from "./common"; /** @@ -508,6 +509,54 @@ export const useMonitorServiceGetHealthSuspense = < queryFn: () => MonitorService.getHealth() as TData, ...options, }); +/** + * List Dag Warnings + * Get a list of DAG warnings. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.warningType + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns DAGWarningCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagWarningServiceListDagWarningsSuspense = < + TData = Common.DagWarningServiceListDagWarningsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + limit, + offset, + orderBy, + warningType, + }: { + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string; + warningType?: DagWarningType; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn( + { dagId, limit, offset, orderBy, warningType }, + queryKey, + ), + queryFn: () => + DagWarningService.listDagWarnings({ + dagId, + limit, + offset, + orderBy, + warningType, + }) as TData, + ...options, + }); /** * Get Plugins * @param data The data for the request. diff --git a/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow/ui/openapi-gen/requests/schemas.gen.ts index 712cc8cae984..3f74dc46a8a8 100644 --- a/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -1130,6 +1130,51 @@ export const $DAGTagCollectionResponse = { description: "DAG Tags Collection serializer for responses.", } as const; +export const $DAGWarningCollectionResponse = { + properties: { + dag_warnings: { + items: { + $ref: "#/components/schemas/DAGWarningResponse", + }, + type: "array", + title: "Dag Warnings", + }, + total_entries: { + type: "integer", + title: "Total Entries", + }, + }, + type: "object", + required: ["dag_warnings", "total_entries"], + title: "DAGWarningCollectionResponse", + description: "DAG warning collection serializer for responses.", +} as const; + +export const $DAGWarningResponse = { + properties: { + dag_id: { + type: "string", + title: "Dag Id", + }, + warning_type: { + $ref: "#/components/schemas/DagWarningType", + }, + message: { + type: "string", + title: "Message", + }, + timestamp: { + type: "string", + format: "date-time", + title: "Timestamp", + }, + }, + type: "object", + required: ["dag_id", "warning_type", "message", "timestamp"], + title: "DAGWarningResponse", + description: "DAG Warning serializer for responses.", +} as const; + export const $DAGWithLatestDagRunsCollectionResponse = { properties: { total_entries: { @@ -1483,6 +1528,16 @@ export const $DagTagPydantic = { "Serializable representation of the DagTag ORM SqlAlchemyModel used by internal API.", } as const; +export const $DagWarningType = { + type: "string", + enum: ["asset conflict", "non-existent pool"], + title: "DagWarningType", + description: `Enum for DAG warning types. 
+ +This is the set of allowable values for the \`\`warning_type\`\` field +in the DagWarning model.`, +} as const; + export const $EventLogResponse = { properties: { event_log_id: { diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts index 486e04b056f8..6c42a500e974 100644 --- a/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow/ui/openapi-gen/requests/services.gen.ts @@ -40,6 +40,8 @@ import type { GetEventLogData, GetEventLogResponse, GetHealthResponse, + ListDagWarningsData, + ListDagWarningsResponse, GetPluginsData, GetPluginsResponse, DeletePoolData, @@ -652,6 +654,41 @@ export class MonitorService { } } +export class DagWarningService { + /** + * List Dag Warnings + * Get a list of DAG warnings. + * @param data The data for the request. + * @param data.dagId + * @param data.warningType + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns DAGWarningCollectionResponse Successful Response + * @throws ApiError + */ + public static listDagWarnings( + data: ListDagWarningsData = {}, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/dagWarnings", + query: { + dag_id: data.dagId, + warning_type: data.warningType, + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 422: "Validation Error", + }, + }); + } +} + export class PluginService { /** * Get Plugins diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index 0580694ba78f..4a06652e3802 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -222,6 +222,24 @@ export type DAGTagCollectionResponse = { total_entries: number; }; +/** + * DAG warning collection serializer for responses. + */ +export type DAGWarningCollectionResponse = { + dag_warnings: Array; + total_entries: number; +}; + +/** + * DAG Warning serializer for responses. + */ +export type DAGWarningResponse = { + dag_id: string; + warning_type: DagWarningType; + message: string; + timestamp: string; +}; + /** * DAG with latest dag runs collection response serializer. */ @@ -312,6 +330,14 @@ export type DagTagPydantic = { dag_id: string; }; +/** + * Enum for DAG warning types. + * + * This is the set of allowable values for the ``warning_type`` field + * in the DagWarning model. + */ +export type DagWarningType = "asset conflict" | "non-existent pool"; + /** * Event Log Response. 
*/ @@ -771,6 +797,16 @@ export type GetEventLogResponse = EventLogResponse; export type GetHealthResponse = HealthInfoSchema; +export type ListDagWarningsData = { + dagId?: string | null; + limit?: number; + offset?: number; + orderBy?: string; + warningType?: DagWarningType | null; +}; + +export type ListDagWarningsResponse = DAGWarningCollectionResponse; + export type GetPluginsData = { limit?: number; offset?: number; @@ -1347,6 +1383,29 @@ export type $OpenApiTs = { }; }; }; + "/public/dagWarnings": { + get: { + req: ListDagWarningsData; + res: { + /** + * Successful Response + */ + 200: DAGWarningCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; "/public/plugins/": { get: { req: GetPluginsData; diff --git a/tests/api_fastapi/core_api/routes/public/test_dag_warning.py b/tests/api_fastapi/core_api/routes/public/test_dag_warning.py new file mode 100644 index 000000000000..61237bd10299 --- /dev/null +++ b/tests/api_fastapi/core_api/routes/public/test_dag_warning.py @@ -0,0 +1,85 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from __future__ import annotations + +import pytest + +from airflow.models.dag import DagModel +from airflow.models.dagwarning import DagWarning +from airflow.utils.session import provide_session + +from tests_common.test_utils.db import clear_db_dag_warnings, clear_db_dags + +pytestmark = pytest.mark.db_test + +DAG1_ID = "test_dag1" +DAG1_MESSAGE = "test message 1" +DAG2_ID = "test_dag2" +DAG2_MESSAGE = "test message 2" +DAG3_ID = "test_dag3" +DAG3_MESSAGE = "test message 3" +DAG_WARNING_TYPE = "non-existent pool" + + +@pytest.fixture(autouse=True) +@provide_session +def setup(dag_maker, session=None) -> None: + clear_db_dags() + clear_db_dag_warnings() + + session.add(DagModel(dag_id=DAG1_ID)) + session.add(DagModel(dag_id=DAG2_ID)) + session.add(DagModel(dag_id=DAG3_ID)) + session.add(DagWarning(DAG1_ID, DAG_WARNING_TYPE, DAG1_MESSAGE)) + session.add(DagWarning(DAG2_ID, DAG_WARNING_TYPE, DAG2_MESSAGE)) + session.add(DagWarning(DAG3_ID, DAG_WARNING_TYPE, DAG3_MESSAGE)) + session.commit() + + +class TestGetDagWarnings: + @pytest.mark.parametrize( + "query_params, expected_total_entries, expected_messages", + [ + ({}, 3, [DAG1_MESSAGE, DAG2_MESSAGE, DAG3_MESSAGE]), + ({"dag_id": DAG1_ID}, 1, [DAG1_MESSAGE]), + ({"warning_type": DAG_WARNING_TYPE}, 3, [DAG1_MESSAGE, DAG2_MESSAGE, DAG3_MESSAGE]), + ({"limit": 1, "order_by": "message"}, 3, [DAG1_MESSAGE]), + ({"limit": 1, "offset": 1, "order_by": "message"}, 3, [DAG2_MESSAGE]), + ({"limit": 1, "offset": 2, "order_by": "dag_id"}, 3, [DAG3_MESSAGE]), + ({"limit": 1, "offset": 2, "order_by": "-dag_id"}, 3, [DAG1_MESSAGE]), + ({"limit": 1, "order_by": "timestamp"}, 3, [DAG1_MESSAGE]), + ({"limit": 1, "order_by": "-timestamp"}, 3, [DAG3_MESSAGE]), + ({"order_by": "timestamp"}, 3, [DAG1_MESSAGE, DAG2_MESSAGE, DAG3_MESSAGE]), + ({"order_by": "-timestamp"}, 3, [DAG3_MESSAGE, DAG2_MESSAGE, DAG1_MESSAGE]), + ({"order_by": "dag_id"}, 3, [DAG1_MESSAGE, DAG2_MESSAGE, DAG3_MESSAGE]), + ({"order_by": "-dag_id"}, 3, [DAG3_MESSAGE, DAG2_MESSAGE, DAG1_MESSAGE]), + ], + ) + def test_get_dag_warnings(self, test_client, query_params, expected_total_entries, expected_messages): + response = test_client.get("/public/dagWarnings", params=query_params) + assert response.status_code == 200 + response_json = response.json() + assert response_json["total_entries"] == expected_total_entries + assert len(response_json["dag_warnings"]) == len(expected_messages) + assert [dag_warning["message"] for dag_warning in response_json["dag_warnings"]] == expected_messages + + def test_get_dag_warnings_bad_request(self, test_client): + response = test_client.get("/public/dagWarnings", params={"warning_type": "invalid"}) + response_json = response.json() + assert response.status_code == 422 + assert response_json["detail"][0]["msg"] == "Input should be 'asset conflict' or 'non-existent pool'" From 30147ffd7d8730cad018d851cc4d29a47a2673a0 Mon Sep 17 00:00:00 2001 From: Daniel Standish <15932138+dstandish@users.noreply.github.com> Date: Mon, 4 Nov 2024 09:06:31 -0800 Subject: [PATCH 018/137] AIP-84 Migrate backfill API to fast api (#43496) --- .../endpoints/backfill_endpoint.py | 183 ---- airflow/api_connexion/openapi/v1.yaml | 149 --- .../api_connexion/schemas/backfill_schema.py | 100 -- .../core_api/openapi/v1-generated.yaml | 870 ++++++++++++------ .../core_api/routes/public/__init__.py | 3 + .../core_api/routes/public/backfills.py | 187 ++++ .../core_api/serializers/backfills.py | 59 ++ airflow/models/backfill.py | 35 +- airflow/ui/openapi-gen/queries/common.ts | 191 ++-- 
airflow/ui/openapi-gen/queries/prefetch.ts | 182 ++-- airflow/ui/openapi-gen/queries/queries.ts | 500 +++++++--- airflow/ui/openapi-gen/queries/suspense.ts | 235 +++-- .../ui/openapi-gen/requests/schemas.gen.ts | 51 + .../ui/openapi-gen/requests/services.gen.ts | 434 ++++++--- airflow/ui/openapi-gen/requests/types.gen.ts | 421 +++++++-- airflow/www/static/js/types/api-generated.ts | 181 ---- .../endpoints/test_backfill_endpoint.py | 442 --------- .../schemas/test_backfill_schema.py | 55 -- .../core_api/routes/public/test_backfills.py | 309 +++++++ tests/models/test_backfill.py | 36 - 20 files changed, 2597 insertions(+), 2026 deletions(-) delete mode 100644 airflow/api_connexion/endpoints/backfill_endpoint.py delete mode 100644 airflow/api_connexion/schemas/backfill_schema.py create mode 100644 airflow/api_fastapi/core_api/routes/public/backfills.py create mode 100644 airflow/api_fastapi/core_api/serializers/backfills.py delete mode 100644 tests/api_connexion/endpoints/test_backfill_endpoint.py delete mode 100644 tests/api_connexion/schemas/test_backfill_schema.py create mode 100644 tests/api_fastapi/core_api/routes/public/test_backfills.py diff --git a/airflow/api_connexion/endpoints/backfill_endpoint.py b/airflow/api_connexion/endpoints/backfill_endpoint.py deleted file mode 100644 index b53d461dc5d1..000000000000 --- a/airflow/api_connexion/endpoints/backfill_endpoint.py +++ /dev/null @@ -1,183 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from __future__ import annotations - -import logging -from functools import wraps -from typing import TYPE_CHECKING - -from flask import request -from marshmallow import ValidationError -from sqlalchemy import select - -from airflow.api_connexion import security -from airflow.api_connexion.exceptions import BadRequest, Conflict, NotFound -from airflow.api_connexion.schemas.backfill_schema import ( - BackfillCollection, - backfill_collection_schema, - backfill_schema, -) -from airflow.models.backfill import ( - AlreadyRunningBackfill, - Backfill, - ReprocessBehavior, - _cancel_backfill, - _create_backfill, -) -from airflow.utils.session import NEW_SESSION, provide_session -from airflow.www.decorators import action_logging - -if TYPE_CHECKING: - from datetime import datetime - - from sqlalchemy.orm import Session - - from airflow.api_connexion.types import APIResponse - -log = logging.getLogger(__name__) - -RESOURCE_EVENT_PREFIX = "dag" - - -def backfill_to_dag(func): - """ - Enrich the request with dag_id. 
- - :meta private: - """ - - @wraps(func) - def wrapper(*, backfill_id, session, **kwargs): - backfill = session.get(Backfill, backfill_id) - if not backfill: - raise NotFound("Backfill not found") - return func(dag_id=backfill.dag_id, backfill_id=backfill_id, session=session, **kwargs) - - return wrapper - - -@security.requires_access_dag("GET") -@action_logging -@provide_session -def list_backfills(dag_id, session): - backfills = session.scalars(select(Backfill).where(Backfill.dag_id == dag_id)).all() - obj = BackfillCollection( - backfills=backfills, - total_entries=len(backfills), - ) - return backfill_collection_schema.dump(obj) - - -@provide_session -@backfill_to_dag -@security.requires_access_dag("PUT") -@action_logging -def pause_backfill(*, backfill_id, session, **kwargs): - br = session.get(Backfill, backfill_id) - if br.completed_at: - raise Conflict("Backfill is already completed.") - if br.is_paused is False: - br.is_paused = True - session.commit() - return backfill_schema.dump(br) - - -@provide_session -@backfill_to_dag -@security.requires_access_dag("PUT") -@action_logging -def unpause_backfill(*, backfill_id, session, **kwargs): - br = session.get(Backfill, backfill_id) - if br.completed_at: - raise Conflict("Backfill is already completed.") - if br.is_paused: - br.is_paused = False - session.commit() - return backfill_schema.dump(br) - - -@provide_session -@backfill_to_dag -@security.requires_access_dag("GET") -@action_logging -def get_backfill(*, backfill_id: int, session: Session = NEW_SESSION, **kwargs): - backfill = session.get(Backfill, backfill_id) - if backfill: - return backfill_schema.dump(backfill) - raise NotFound("Backfill not found") - - -def backfill_obj_to_kwargs(f): - """ - Convert the request body (containing backfill object json) to kwargs. - - The main point here is to be compatible with the ``requires_access_dag`` decorator, - which takes dag_id kwarg and doesn't support json request body. - """ - - @wraps(f) - def inner(): - body = request.json - try: - obj = backfill_schema.load(body) - except ValidationError as err: - raise BadRequest(detail=str(err.messages)) - return f(**obj) - - return inner - - -@backfill_obj_to_kwargs -@security.requires_access_dag("PUT") -@action_logging -def create_backfill( - dag_id: str, - from_date: datetime, - to_date: datetime, - max_active_runs: int = 10, - reverse: bool = False, - dag_run_conf: dict | None = None, - reprocess_behavior: ReprocessBehavior | None = None, -) -> APIResponse: - try: - backfill_obj = _create_backfill( - dag_id=dag_id, - from_date=from_date, - to_date=to_date, - max_active_runs=max_active_runs, - reverse=reverse, - dag_run_conf=dag_run_conf, - reprocess_behavior=reprocess_behavior, - ) - return backfill_schema.dump(backfill_obj) - except AlreadyRunningBackfill: - raise Conflict(f"There is already a running backfill for dag {dag_id}") - - -@provide_session -@backfill_to_dag -@security.requires_access_dag("PUT") -@action_logging -def cancel_backfill( - *, - backfill_id, - session: Session = NEW_SESSION, # used by backfill_to_dag decorator - **kwargs, -): - br = _cancel_backfill(backfill_id=backfill_id) - return backfill_schema.dump(br) diff --git a/airflow/api_connexion/openapi/v1.yaml b/airflow/api_connexion/openapi/v1.yaml index 0ec0a3cdcb68..c884c1595411 100644 --- a/airflow/api_connexion/openapi/v1.yaml +++ b/airflow/api_connexion/openapi/v1.yaml @@ -245,155 +245,6 @@ servers: description: Apache Airflow Stable API. 
paths: - # Database entities - /backfills: - get: - summary: List backfills - x-openapi-router-controller: airflow.api_connexion.endpoints.backfill_endpoint - operationId: list_backfills - tags: [Backfill] - parameters: - - name: dag_id - in: query - schema: - type: string - required: true - description: | - List backfills for this dag. - responses: - "200": - description: Success. - content: - application/json: - schema: - $ref: "#/components/schemas/BackfillCollection" - "401": - $ref: "#/components/responses/Unauthenticated" - "403": - $ref: "#/components/responses/PermissionDenied" - - post: - summary: Create a backfill job. - x-openapi-router-controller: airflow.api_connexion.endpoints.backfill_endpoint - operationId: create_backfill - tags: [Backfill] - requestBody: - required: true - content: - application/json: - schema: - $ref: "#/components/schemas/Backfill" - responses: - "200": - description: Success. - content: - application/json: - schema: - $ref: "#/components/schemas/Backfill" - "400": - $ref: "#/components/responses/BadRequest" - "401": - $ref: "#/components/responses/Unauthenticated" - "403": - $ref: "#/components/responses/PermissionDenied" - - /backfills/{backfill_id}: - parameters: - - $ref: "#/components/parameters/BackfillIdPath" - get: - summary: Get a backfill - x-openapi-router-controller: airflow.api_connexion.endpoints.backfill_endpoint - operationId: get_backfill - tags: [Backfill] - responses: - "200": - description: Success. - content: - application/json: - schema: - $ref: "#/components/schemas/Backfill" - "401": - $ref: "#/components/responses/Unauthenticated" - "403": - $ref: "#/components/responses/PermissionDenied" - "404": - $ref: "#/components/responses/NotFound" - - /backfills/{backfill_id}/pause: - parameters: - - $ref: "#/components/parameters/BackfillIdPath" - post: - summary: Pause a backfill - x-openapi-router-controller: airflow.api_connexion.endpoints.backfill_endpoint - operationId: pause_backfill - tags: [Backfill] - responses: - "200": - description: Success. - content: - application/json: - schema: - $ref: "#/components/schemas/Backfill" - "401": - $ref: "#/components/responses/Unauthenticated" - "403": - $ref: "#/components/responses/PermissionDenied" - "404": - $ref: "#/components/responses/NotFound" - "409": - $ref: "#/components/responses/Conflict" - - /backfills/{backfill_id}/unpause: - parameters: - - $ref: "#/components/parameters/BackfillIdPath" - post: - summary: Pause a backfill - x-openapi-router-controller: airflow.api_connexion.endpoints.backfill_endpoint - operationId: unpause_backfill - tags: [Backfill] - responses: - "200": - description: Success. - content: - application/json: - schema: - $ref: "#/components/schemas/Backfill" - "401": - $ref: "#/components/responses/Unauthenticated" - "403": - $ref: "#/components/responses/PermissionDenied" - "404": - $ref: "#/components/responses/NotFound" - "409": - $ref: "#/components/responses/Conflict" - - /backfills/{backfill_id}/cancel: - parameters: - - $ref: "#/components/parameters/BackfillIdPath" - post: - summary: Cancel a backfill - description: | - When a backfill is cancelled, all queued dag runs will be marked as failed. - Running dag runs will be allowed to continue. - x-openapi-router-controller: airflow.api_connexion.endpoints.backfill_endpoint - operationId: cancel_backfill - tags: [Backfill] - responses: - "200": - description: Success. 
- content: - application/json: - schema: - $ref: "#/components/schemas/Backfill" - "401": - $ref: "#/components/responses/Unauthenticated" - "403": - $ref: "#/components/responses/PermissionDenied" - "404": - $ref: "#/components/responses/NotFound" - "409": - $ref: "#/components/responses/Conflict" - # Database entities /connections: get: diff --git a/airflow/api_connexion/schemas/backfill_schema.py b/airflow/api_connexion/schemas/backfill_schema.py deleted file mode 100644 index 9f7198469157..000000000000 --- a/airflow/api_connexion/schemas/backfill_schema.py +++ /dev/null @@ -1,100 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -import typing -from typing import NamedTuple - -from marshmallow import Schema, fields, utils, validate -from marshmallow_sqlalchemy import SQLAlchemySchema, auto_field - -from airflow.models.backfill import Backfill, BackfillDagRun, ReprocessBehavior - - -class ReprocessBehaviorField(fields.String): - """Schema for ReprocessBehavior enum.""" - - def __init__(self, **metadata): - super().__init__(**metadata) - self.validators = [validate.OneOf(ReprocessBehavior), *self.validators] - - def _serialize(self, value, attr, obj, **kwargs) -> str | None: - if value is None: - return None - return utils.ensure_text_type(ReprocessBehavior(value).value) - - def _deserialize(self, value, attr, data, **kwargs) -> typing.Any: - deser = super()._deserialize(value, attr, data, **kwargs) - if not deser: - return None - return ReprocessBehavior(deser) - - -class BackfillSchema(SQLAlchemySchema): - """Backfill Schema.""" - - class Meta: - """Meta.""" - - model = Backfill - - id = auto_field(dump_only=True) - dag_id = auto_field() - from_date = auto_field() - to_date = auto_field() - dag_run_conf = fields.Dict(allow_none=True) - reverse = fields.Boolean() - is_paused = auto_field() - reprocess_behavior = ReprocessBehaviorField() - max_active_runs = auto_field() - created_at = auto_field() - completed_at = auto_field() - updated_at = auto_field() - - -class BackfillDagRunSchema(SQLAlchemySchema): - """Trigger Schema.""" - - class Meta: - """Meta.""" - - model = BackfillDagRun - - id = auto_field(dump_only=True) - backfill_id = auto_field(dump_only=True) - dag_run_id = auto_field(dump_only=True) - sort_ordinal = auto_field(dump_only=True) - - -class BackfillCollection(NamedTuple): - """List of Backfills with meta.""" - - backfills: list[Backfill] - total_entries: int - - -class BackfillCollectionSchema(Schema): - """Backfill Collection Schema.""" - - backfills = fields.List(fields.Nested(BackfillSchema)) - total_entries = fields.Int() - - -backfill_schema = BackfillSchema() -backfill_dag_run_schema = BackfillDagRunSchema() -backfill_collection_schema = BackfillCollectionSchema() diff --git 
a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml index 19a4875e704d..a82e34a3f5ee 100644 --- a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml @@ -170,67 +170,46 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/connections/{connection_id}: - delete: + /public/backfills/: + get: tags: - - Connection - summary: Delete Connection - description: Delete a connection entry. - operationId: delete_connection + - Backfill + summary: List Backfills + operationId: list_backfills parameters: - - name: connection_id - in: path + - name: dag_id + in: query required: true schema: type: string - title: Connection Id - responses: - '204': - description: Successful Response - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - get: - tags: - - Connection - summary: Get Connection - description: Get a connection entry. - operationId: get_connection - parameters: - - name: connection_id - in: path - required: true + title: Dag Id + - name: limit + in: query + required: false + schema: + type: integer + default: 100 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + default: 0 + title: Offset + - name: order_by + in: query + required: false schema: type: string - title: Connection Id + default: id + title: Order By responses: '200': description: Successful Response content: application/json: - schema: - $ref: '#/components/schemas/ConnectionResponse' + schema: {} '401': content: application/json: @@ -243,54 +222,29 @@ paths: schema: $ref: '#/components/schemas/HTTPExceptionResponse' description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found '422': description: Validation Error content: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/connections/: - get: + post: tags: - - Connection - summary: Get Connections - description: Get all connection entries. 
- operationId: get_connections - parameters: - - name: limit - in: query - required: false - schema: - type: integer - default: 100 - title: Limit - - name: offset - in: query - required: false - schema: - type: integer - default: 0 - title: Offset - - name: order_by - in: query - required: false - schema: - type: string - default: id - title: Order By + - Backfill + summary: Create Backfill + operationId: create_backfill + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/BackfillPostBody' responses: '200': description: Successful Response content: application/json: - schema: - $ref: '#/components/schemas/ConnectionCollectionResponse' + schema: {} '401': content: application/json: @@ -309,38 +263,37 @@ paths: schema: $ref: '#/components/schemas/HTTPExceptionResponse' description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict '422': description: Validation Error content: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/dags/{dag_id}/dagRuns/{dag_run_id}: + /public/backfills/{backfill_id}: get: tags: - - DagRun - summary: Get Dag Run - operationId: get_dag_run + - Backfill + summary: Get Backfill + operationId: get_backfill parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: dag_run_id + - name: backfill_id in: path required: true schema: type: string - title: Dag Run Id + title: Backfill Id responses: '200': description: Successful Response content: application/json: - schema: - $ref: '#/components/schemas/DAGRunResponse' + schema: {} '401': content: application/json: @@ -365,34 +318,24 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - delete: + /public/backfills/{backfill_id}/pause: + put: tags: - - DagRun - summary: Delete Dag Run - description: Delete a DAG Run entry. - operationId: delete_dag_run + - Backfill + summary: Pause Backfill + operationId: pause_backfill parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: dag_run_id + - name: backfill_id in: path required: true schema: - type: string - title: Dag Run Id + title: Backfill Id responses: - '204': + '200': description: Successful Response - '400': content: application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request + schema: {} '401': content: application/json: @@ -411,60 +354,36 @@ paths: schema: $ref: '#/components/schemas/HTTPExceptionResponse' description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict '422': description: Validation Error content: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - patch: + /public/backfills/{backfill_id}/unpause: + put: tags: - - DagRun - summary: Patch Dag Run State - description: Modify a DAG Run. 
- operationId: patch_dag_run_state + - Backfill + summary: Unpause Backfill + operationId: unpause_backfill parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: dag_run_id + - name: backfill_id in: path required: true schema: - type: string - title: Dag Run Id - - name: update_mask - in: query - required: false - schema: - anyOf: - - type: array - items: - type: string - - type: 'null' - title: Update Mask - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/DAGRunPatchBody' + title: Backfill Id responses: '200': description: Successful Response content: application/json: - schema: - $ref: '#/components/schemas/DAGRunResponse' - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request + schema: {} '401': content: application/json: @@ -483,50 +402,36 @@ paths: schema: $ref: '#/components/schemas/HTTPExceptionResponse' description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict '422': description: Validation Error content: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/dagSources/{file_token}: - get: + /public/backfills/{backfill_id}/cancel: + put: tags: - - DagSource - summary: Get Dag Source - description: Get source code using file token. - operationId: get_dag_source + - Backfill + summary: Cancel Backfill + operationId: cancel_backfill parameters: - - name: file_token + - name: backfill_id in: path required: true schema: - type: string - title: File Token - - name: accept - in: header - required: false - schema: - type: string - default: '*/*' - title: Accept + title: Backfill Id responses: '200': description: Successful Response content: application/json: - schema: - $ref: '#/components/schemas/DAGSourceResponse' - text/plain: - schema: - type: string - example: dag code - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request + schema: {} '401': content: application/json: @@ -545,12 +450,12 @@ paths: schema: $ref: '#/components/schemas/HTTPExceptionResponse' description: Not Found - '406': + '409': content: application/json: schema: $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Acceptable + description: Conflict '422': description: Validation Error content: @@ -734,23 +639,387 @@ paths: title: Last Dag Run State requestBody: required: true - content: - application/json: - schema: - $ref: '#/components/schemas/DAGPatchBody' + content: + application/json: + schema: + $ref: '#/components/schemas/DAGPatchBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGCollectionResponse' + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' 
+ /public/dags/tags: + get: + tags: + - DAG + summary: Get Dag Tags + description: Get all DAG tags. + operationId: get_dag_tags + parameters: + - name: limit + in: query + required: false + schema: + type: integer + default: 100 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: string + default: name + title: Order By + - name: tag_name_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Tag Name Pattern + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGTagCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/dags/{dag_id}: + get: + tags: + - DAG + summary: Get Dag + description: Get basic information about a DAG. + operationId: get_dag + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGResponse' + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unprocessable Entity + patch: + tags: + - DAG + summary: Patch Dag + description: Patch the specific DAG. + operationId: patch_dag + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DAGPatchBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGResponse' + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + delete: + tags: + - DAG + summary: Delete Dag + description: Delete the specific DAG. 
+ operationId: delete_dag + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unprocessable Entity + /public/dags/{dag_id}/details: + get: + tags: + - DAG + summary: Get Dag Details + description: Get details of DAG. + operationId: get_dag_details + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGDetailsResponse' + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unprocessable Entity + /public/connections/{connection_id}: + delete: + tags: + - Connection + summary: Delete Connection + description: Delete a connection entry. + operationId: delete_connection + parameters: + - name: connection_id + in: path + required: true + schema: + type: string + title: Connection Id responses: - '200': + '204': description: Successful Response + '401': content: application/json: schema: - $ref: '#/components/schemas/DAGCollectionResponse' - '400': + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': content: application/json: schema: $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + get: + tags: + - Connection + summary: Get Connection + description: Get a connection entry. + operationId: get_connection + parameters: + - name: connection_id + in: path + required: true + schema: + type: string + title: Connection Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectionResponse' '401': content: application/json: @@ -775,13 +1044,13 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/dags/tags: + /public/connections/: get: tags: - - DAG - summary: Get Dag Tags - description: Get all DAG tags. 
- operationId: get_dag_tags + - Connection + summary: Get Connections + description: Get all connection entries. + operationId: get_connections parameters: - name: limit in: query @@ -802,23 +1071,15 @@ paths: required: false schema: type: string - default: name + default: id title: Order By - - name: tag_name_pattern - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Tag Name Pattern responses: '200': description: Successful Response content: application/json: schema: - $ref: '#/components/schemas/DAGTagCollectionResponse' + $ref: '#/components/schemas/ConnectionCollectionResponse' '401': content: application/json: @@ -831,19 +1092,24 @@ paths: schema: $ref: '#/components/schemas/HTTPExceptionResponse' description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found '422': description: Validation Error content: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/dags/{dag_id}: + /public/dags/{dag_id}/dagRuns/{dag_run_id}: get: tags: - - DAG - summary: Get Dag - description: Get basic information about a DAG. - operationId: get_dag + - DagRun + summary: Get Dag Run + operationId: get_dag_run parameters: - name: dag_id in: path @@ -851,19 +1117,19 @@ paths: schema: type: string title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id responses: '200': description: Successful Response content: application/json: schema: - $ref: '#/components/schemas/DAGResponse' - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request + $ref: '#/components/schemas/DAGRunResponse' '401': content: application/json: @@ -883,17 +1149,17 @@ paths: $ref: '#/components/schemas/HTTPExceptionResponse' description: Not Found '422': + description: Validation Error content: application/json: schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unprocessable Entity - patch: + $ref: '#/components/schemas/HTTPValidationError' + delete: tags: - - DAG - summary: Patch Dag - description: Patch the specific DAG. - operationId: patch_dag + - DagRun + summary: Delete Dag Run + description: Delete a DAG Run entry. + operationId: delete_dag_run parameters: - name: dag_id in: path @@ -901,29 +1167,15 @@ paths: schema: type: string title: Dag Id - - name: update_mask - in: query - required: false - schema: - anyOf: - - type: array - items: - type: string - - type: 'null' - title: Update Mask - requestBody: + - name: dag_run_id + in: path required: true - content: - application/json: - schema: - $ref: '#/components/schemas/DAGPatchBody' + schema: + type: string + title: Dag Run Id responses: - '200': + '204': description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/DAGResponse' '400': content: application/json: @@ -954,12 +1206,12 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - delete: + patch: tags: - - DAG - summary: Delete Dag - description: Delete the specific DAG. - operationId: delete_dag + - DagRun + summary: Patch Dag Run State + description: Modify a DAG Run. 
+ operationId: patch_dag_run_state parameters: - name: dag_id in: path @@ -967,12 +1219,35 @@ paths: schema: type: string title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DAGRunPatchBody' responses: '200': description: Successful Response content: application/json: - schema: {} + schema: + $ref: '#/components/schemas/DAGRunResponse' '400': content: application/json: @@ -998,32 +1273,43 @@ paths: $ref: '#/components/schemas/HTTPExceptionResponse' description: Not Found '422': + description: Validation Error content: application/json: schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unprocessable Entity - /public/dags/{dag_id}/details: + $ref: '#/components/schemas/HTTPValidationError' + /public/dagSources/{file_token}: get: tags: - - DAG - summary: Get Dag Details - description: Get details of DAG. - operationId: get_dag_details + - DagSource + summary: Get Dag Source + description: Get source code using file token. + operationId: get_dag_source parameters: - - name: dag_id + - name: file_token in: path required: true schema: type: string - title: Dag Id + title: File Token + - name: accept + in: header + required: false + schema: + type: string + default: '*/*' + title: Accept responses: '200': description: Successful Response content: application/json: schema: - $ref: '#/components/schemas/DAGDetailsResponse' + $ref: '#/components/schemas/DAGSourceResponse' + text/plain: + schema: + type: string + example: dag code '400': content: application/json: @@ -1048,12 +1334,18 @@ paths: schema: $ref: '#/components/schemas/HTTPExceptionResponse' description: Not Found - '422': + '406': content: application/json: schema: $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unprocessable Entity + description: Not Acceptable + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' /public/eventLogs/{event_log_id}: get: tags: @@ -1929,6 +2221,41 @@ components: type: object title: AppBuilderViewResponse description: Serializer for AppBuilder View responses. + BackfillPostBody: + properties: + dag_id: + type: string + title: Dag Id + from_date: + type: string + format: date-time + title: From Date + to_date: + type: string + format: date-time + title: To Date + run_backwards: + type: boolean + title: Run Backwards + default: false + dag_run_conf: + type: object + title: Dag Run Conf + default: {} + reprocess_behavior: + $ref: '#/components/schemas/ReprocessBehavior' + default: none + max_active_runs: + type: integer + title: Max Active Runs + default: 10 + type: object + required: + - dag_id + - from_date + - to_date + title: BackfillPostBody + description: Object used for create backfill request. BaseInfoSchema: properties: status: @@ -3309,6 +3636,17 @@ components: - version title: ProviderResponse description: Provider serializer for responses. + ReprocessBehavior: + type: string + enum: + - failed + - completed + - none + title: ReprocessBehavior + description: 'Internal enum for setting reprocess behavior in a backfill. 
+ + + :meta private:' SchedulerInfoSchema: properties: status: diff --git a/airflow/api_fastapi/core_api/routes/public/__init__.py b/airflow/api_fastapi/core_api/routes/public/__init__.py index a153952287bc..a443f5a28ae8 100644 --- a/airflow/api_fastapi/core_api/routes/public/__init__.py +++ b/airflow/api_fastapi/core_api/routes/public/__init__.py @@ -18,6 +18,7 @@ from __future__ import annotations from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.routes.public.backfills import backfills_router from airflow.api_fastapi.core_api.routes.public.connections import connections_router from airflow.api_fastapi.core_api.routes.public.dag_run import dag_run_router from airflow.api_fastapi.core_api.routes.public.dag_sources import dag_sources_router @@ -35,6 +36,8 @@ public_router = AirflowRouter(prefix="/public") +public_router.include_router(backfills_router) +public_router.include_router(dags_router) public_router.include_router(connections_router) public_router.include_router(dag_run_router) public_router.include_router(dag_sources_router) diff --git a/airflow/api_fastapi/core_api/routes/public/backfills.py b/airflow/api_fastapi/core_api/routes/public/backfills.py new file mode 100644 index 000000000000..f6fe531d0063 --- /dev/null +++ b/airflow/api_fastapi/core_api/routes/public/backfills.py @@ -0,0 +1,187 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
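[Editor's note] The hunk above wires the new backfills_router into the public router via include_router, the same pattern the other public routers use. A minimal standalone sketch of that pattern, assuming a plain fastapi.APIRouter in place of Airflow's AirflowRouter wrapper and a placeholder handler (both are illustrative assumptions, not part of this patch):

    from fastapi import APIRouter, FastAPI

    # Hypothetical router mirroring the prefix/tags used by the real backfills_router.
    backfills_router = APIRouter(prefix="/backfills", tags=["Backfill"])

    @backfills_router.get("/")
    async def list_backfills(dag_id: str):
        # Placeholder handler; the real route queries the Backfill table with paginated_select.
        return {"backfills": [], "total_entries": 0, "dag_id": dag_id}

    # Mount the sub-router on the public router, then the public router on the app,
    # which is what the __init__.py change above does inside Airflow.
    public_router = APIRouter(prefix="/public")
    public_router.include_router(backfills_router)

    app = FastAPI()
    app.include_router(public_router)
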
+from __future__ import annotations + +from fastapi import Depends, HTTPException +from sqlalchemy import select, update +from sqlalchemy.orm import Session +from typing_extensions import Annotated + +from airflow.api_fastapi.common.db.common import get_session, paginated_select +from airflow.api_fastapi.common.parameters import QueryLimit, QueryOffset, SortParam +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.openapi.exceptions import ( + create_openapi_http_exception_doc, +) +from airflow.api_fastapi.core_api.serializers.backfills import ( + BackfillCollectionResponse, + BackfillPostBody, + BackfillResponse, +) +from airflow.models import DagRun +from airflow.models.backfill import ( + AlreadyRunningBackfill, + Backfill, + BackfillDagRun, + _create_backfill, +) +from airflow.utils import timezone +from airflow.utils.state import DagRunState + +backfills_router = AirflowRouter(tags=["Backfill"], prefix="/backfills") + + +@backfills_router.get( + path="/", + responses=create_openapi_http_exception_doc([401, 403]), +) +async def list_backfills( + dag_id: str, + limit: QueryLimit, + offset: QueryOffset, + order_by: Annotated[ + SortParam, + Depends(SortParam(["id"], Backfill).dynamic_depends()), + ], + session: Annotated[Session, Depends(get_session)], +): + select_stmt, total_entries = paginated_select( + select(Backfill).where(Backfill.dag_id == dag_id), + [], + order_by=order_by, + offset=offset, + limit=limit, + session=session, + ) + backfills = session.scalars(select_stmt).all() + + return BackfillCollectionResponse( + backfills=[BackfillResponse.model_validate(x, from_attributes=True) for x in backfills], + total_entries=len(backfills), + ) + + +@backfills_router.get( + path="/{backfill_id}", + responses=create_openapi_http_exception_doc([401, 403, 404]), +) +async def get_backfill( + backfill_id: str, + session: Annotated[Session, Depends(get_session)], +): + backfill = session.get(Backfill, backfill_id) + if backfill: + return BackfillResponse.model_validate(backfill, from_attributes=True) + raise HTTPException(404, "Backfill not found") + + +@backfills_router.put( + path="/{backfill_id}/pause", + responses=create_openapi_http_exception_doc([401, 403, 404, 409]), +) +async def pause_backfill(*, backfill_id, session: Annotated[Session, Depends(get_session)]): + b = session.get(Backfill, backfill_id) + if not b: + raise HTTPException(404, f"Could not find backfill with id {backfill_id}") + if b.completed_at: + raise HTTPException(409, "Backfill is already completed.") + if b.is_paused is False: + b.is_paused = True + session.commit() + return BackfillResponse.model_validate(b, from_attributes=True) + + +@backfills_router.put( + path="/{backfill_id}/unpause", + responses=create_openapi_http_exception_doc([401, 403, 404, 409]), +) +async def unpause_backfill(*, backfill_id, session: Annotated[Session, Depends(get_session)]): + b = session.get(Backfill, backfill_id) + if not b: + raise HTTPException(404, f"Could not find backfill with id {backfill_id}") + if b.completed_at: + raise HTTPException(409, "Backfill is already completed.") + if b.is_paused: + b.is_paused = False + return BackfillResponse.model_validate(b, from_attributes=True) + + +@backfills_router.put( + path="/{backfill_id}/cancel", + responses=create_openapi_http_exception_doc([401, 403, 404, 409]), +) +async def cancel_backfill(*, backfill_id, session: Annotated[Session, Depends(get_session)]): + b: Backfill = session.get(Backfill, backfill_id) + if not b: + raise 
HTTPException(404, f"Could not find backfill with id {backfill_id}") + if b.completed_at is not None: + raise HTTPException(409, "Backfill is already completed.") + + # first, pause, and commit immediately to ensure no other dag runs are started + if not b.is_paused: + b.is_paused = True + session.commit() # ensure no new runs started + + query = ( + update(DagRun) + .where( + DagRun.id.in_( + select( + BackfillDagRun.dag_run_id, + ).where( + BackfillDagRun.backfill_id == b.id, + ), + ), + DagRun.state == DagRunState.QUEUED, + ) + .values(state=DagRunState.FAILED) + .execution_options(synchronize_session=False) + ) + session.execute(query) + session.commit() # this will fail all the queued dag runs in this backfill + + # this is in separate transaction just to avoid potential conflicts + session.refresh(b) + b.completed_at = timezone.utcnow() + return BackfillResponse.model_validate(b, from_attributes=True) + + +@backfills_router.post( + path="/", + responses=create_openapi_http_exception_doc([401, 403, 404, 409]), +) +async def create_backfill( + backfill_request: BackfillPostBody, +): + from_date = timezone.coerce_datetime(backfill_request.from_date) + to_date = timezone.coerce_datetime(backfill_request.to_date) + try: + backfill_obj = _create_backfill( + dag_id=backfill_request.dag_id, + from_date=from_date, + to_date=to_date, + max_active_runs=backfill_request.max_active_runs, + reverse=backfill_request.run_backwards, + dag_run_conf=backfill_request.dag_run_conf, + reprocess_behavior=backfill_request.reprocess_behavior, + ) + return BackfillResponse.model_validate(backfill_obj, from_attributes=True) + except AlreadyRunningBackfill: + raise HTTPException( + status_code=409, + detail=f"There is already a running backfill for dag {backfill_request.dag_id}", + ) diff --git a/airflow/api_fastapi/core_api/serializers/backfills.py b/airflow/api_fastapi/core_api/serializers/backfills.py new file mode 100644 index 000000000000..69d6a98ccfd1 --- /dev/null +++ b/airflow/api_fastapi/core_api/serializers/backfills.py @@ -0,0 +1,59 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
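[Editor's note] The routes added above expose the full backfill lifecycle under /public/backfills. A minimal sketch of exercising them over HTTP with the requests library, assuming the API server is reachable at http://localhost:8080 and needs no authentication, and using a placeholder dag_id and date range (all of these are assumptions for illustration, not part of this patch):

    import requests

    BASE = "http://localhost:8080/public/backfills"  # assumed host/port

    # Create a backfill; the body mirrors BackfillPostBody.
    resp = requests.post(
        f"{BASE}/",
        json={
            "dag_id": "example_dag",                  # placeholder DAG
            "from_date": "2024-01-01T00:00:00Z",
            "to_date": "2024-01-07T00:00:00Z",
            "reprocess_behavior": "none",
            "max_active_runs": 10,
        },
    )
    resp.raise_for_status()
    backfill_id = resp.json()["id"]

    # Pause, unpause, then cancel the same backfill via the PUT endpoints above.
    requests.put(f"{BASE}/{backfill_id}/pause").raise_for_status()
    requests.put(f"{BASE}/{backfill_id}/unpause").raise_for_status()
    requests.put(f"{BASE}/{backfill_id}/cancel").raise_for_status()

A second POST for the same DAG while the first backfill is still running would return 409, matching the AlreadyRunningBackfill handling in create_backfill.
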
+ +from __future__ import annotations + +from datetime import datetime + +from pydantic import BaseModel + +from airflow.models.backfill import ReprocessBehavior + + +class BackfillPostBody(BaseModel): + """Object used for create backfill request.""" + + dag_id: str + from_date: datetime + to_date: datetime + run_backwards: bool = False + dag_run_conf: dict = {} + reprocess_behavior: ReprocessBehavior = ReprocessBehavior.NONE + max_active_runs: int = 10 + + +class BackfillResponse(BaseModel): + """Base serializer for Backfill.""" + + id: int + dag_id: str + from_date: datetime + to_date: datetime + dag_run_conf: dict + is_paused: bool + reprocess_behavior: ReprocessBehavior + max_active_runs: int + created_at: datetime + completed_at: datetime | None + updated_at: datetime + + +class BackfillCollectionResponse(BaseModel): + """Backfill Collection serializer for responses.""" + + backfills: list[BackfillResponse] + total_entries: int diff --git a/airflow/models/backfill.py b/airflow/models/backfill.py index 72df6a1e0eb7..648b35c5bdeb 100644 --- a/airflow/models/backfill.py +++ b/airflow/models/backfill.py @@ -36,12 +36,11 @@ desc, func, select, - update, ) from sqlalchemy.orm import relationship, validates from sqlalchemy_jsonfield import JSONField -from airflow.api_connexion.exceptions import Conflict, NotFound +from airflow.api_connexion.exceptions import NotFound from airflow.exceptions import AirflowException from airflow.models.base import Base, StringID from airflow.settings import json @@ -87,7 +86,7 @@ class Backfill(Base): dag_id = Column(StringID(), nullable=False) from_date = Column(UtcDateTime, nullable=False) to_date = Column(UtcDateTime, nullable=False) - dag_run_conf = Column(JSONField(json=json), nullable=True) + dag_run_conf = Column(JSONField(json=json), nullable=False, default={}) is_paused = Column(Boolean, default=False) """ Controls whether new dag runs will be created for this backfill. 
@@ -333,33 +332,3 @@ def _create_backfill( info, ) return br - - -def _cancel_backfill(backfill_id) -> Backfill: - with create_session() as session: - b: Backfill = session.get(Backfill, backfill_id) - if b.completed_at is not None: - raise Conflict("Backfill is already completed.") - - b.completed_at = timezone.utcnow() - - # first, pause - if not b.is_paused: - b.is_paused = True - - session.commit() - - from airflow.models import DagRun - - # now, let's mark all queued dag runs as failed - query = ( - update(DagRun) - .where( - DagRun.id.in_(select(BackfillDagRun.dag_run_id).where(BackfillDagRun.backfill_id == b.id)), - DagRun.state == DagRunState.QUEUED, - ) - .values(state=DagRunState.FAILED) - .execution_options(synchronize_session=False) - ) - session.execute(query) - return b diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts index f715b41c3f4e..75a343ce74c7 100644 --- a/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow/ui/openapi-gen/queries/common.ts @@ -3,6 +3,7 @@ import { UseQueryResult } from "@tanstack/react-query"; import { AssetService, + BackfillService, ConnectionService, DagRunService, DagService, @@ -109,90 +110,48 @@ export const UseDagsServiceRecentDagRunsKeyFn = ( }, ]), ]; -export type ConnectionServiceGetConnectionDefaultResponse = Awaited< - ReturnType +export type BackfillServiceListBackfillsDefaultResponse = Awaited< + ReturnType >; -export type ConnectionServiceGetConnectionQueryResult< - TData = ConnectionServiceGetConnectionDefaultResponse, +export type BackfillServiceListBackfillsQueryResult< + TData = BackfillServiceListBackfillsDefaultResponse, TError = unknown, > = UseQueryResult; -export const useConnectionServiceGetConnectionKey = - "ConnectionServiceGetConnection"; -export const UseConnectionServiceGetConnectionKeyFn = ( - { - connectionId, - }: { - connectionId: string; - }, - queryKey?: Array, -) => [ - useConnectionServiceGetConnectionKey, - ...(queryKey ?? [{ connectionId }]), -]; -export type ConnectionServiceGetConnectionsDefaultResponse = Awaited< - ReturnType ->; -export type ConnectionServiceGetConnectionsQueryResult< - TData = ConnectionServiceGetConnectionsDefaultResponse, - TError = unknown, -> = UseQueryResult; -export const useConnectionServiceGetConnectionsKey = - "ConnectionServiceGetConnections"; -export const UseConnectionServiceGetConnectionsKeyFn = ( +export const useBackfillServiceListBackfillsKey = + "BackfillServiceListBackfills"; +export const UseBackfillServiceListBackfillsKeyFn = ( { + dagId, limit, offset, orderBy, }: { + dagId: string; limit?: number; offset?: number; orderBy?: string; - } = {}, + }, queryKey?: Array, ) => [ - useConnectionServiceGetConnectionsKey, - ...(queryKey ?? [{ limit, offset, orderBy }]), + useBackfillServiceListBackfillsKey, + ...(queryKey ?? [{ dagId, limit, offset, orderBy }]), ]; -export type DagRunServiceGetDagRunDefaultResponse = Awaited< - ReturnType ->; -export type DagRunServiceGetDagRunQueryResult< - TData = DagRunServiceGetDagRunDefaultResponse, - TError = unknown, -> = UseQueryResult; -export const useDagRunServiceGetDagRunKey = "DagRunServiceGetDagRun"; -export const UseDagRunServiceGetDagRunKeyFn = ( - { - dagId, - dagRunId, - }: { - dagId: string; - dagRunId: string; - }, - queryKey?: Array, -) => [useDagRunServiceGetDagRunKey, ...(queryKey ?? 
[{ dagId, dagRunId }])]; -export type DagSourceServiceGetDagSourceDefaultResponse = Awaited< - ReturnType +export type BackfillServiceGetBackfillDefaultResponse = Awaited< + ReturnType >; -export type DagSourceServiceGetDagSourceQueryResult< - TData = DagSourceServiceGetDagSourceDefaultResponse, +export type BackfillServiceGetBackfillQueryResult< + TData = BackfillServiceGetBackfillDefaultResponse, TError = unknown, > = UseQueryResult; -export const useDagSourceServiceGetDagSourceKey = - "DagSourceServiceGetDagSource"; -export const UseDagSourceServiceGetDagSourceKeyFn = ( +export const useBackfillServiceGetBackfillKey = "BackfillServiceGetBackfill"; +export const UseBackfillServiceGetBackfillKeyFn = ( { - accept, - fileToken, + backfillId, }: { - accept?: string; - fileToken: string; + backfillId: string; }, queryKey?: Array, -) => [ - useDagSourceServiceGetDagSourceKey, - ...(queryKey ?? [{ accept, fileToken }]), -]; +) => [useBackfillServiceGetBackfillKey, ...(queryKey ?? [{ backfillId }])]; export type DagServiceGetDagsDefaultResponse = Awaited< ReturnType >; @@ -300,6 +259,90 @@ export const UseDagServiceGetDagDetailsKeyFn = ( }, queryKey?: Array, ) => [useDagServiceGetDagDetailsKey, ...(queryKey ?? [{ dagId }])]; +export type ConnectionServiceGetConnectionDefaultResponse = Awaited< + ReturnType +>; +export type ConnectionServiceGetConnectionQueryResult< + TData = ConnectionServiceGetConnectionDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useConnectionServiceGetConnectionKey = + "ConnectionServiceGetConnection"; +export const UseConnectionServiceGetConnectionKeyFn = ( + { + connectionId, + }: { + connectionId: string; + }, + queryKey?: Array, +) => [ + useConnectionServiceGetConnectionKey, + ...(queryKey ?? [{ connectionId }]), +]; +export type ConnectionServiceGetConnectionsDefaultResponse = Awaited< + ReturnType +>; +export type ConnectionServiceGetConnectionsQueryResult< + TData = ConnectionServiceGetConnectionsDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useConnectionServiceGetConnectionsKey = + "ConnectionServiceGetConnections"; +export const UseConnectionServiceGetConnectionsKeyFn = ( + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: Array, +) => [ + useConnectionServiceGetConnectionsKey, + ...(queryKey ?? [{ limit, offset, orderBy }]), +]; +export type DagRunServiceGetDagRunDefaultResponse = Awaited< + ReturnType +>; +export type DagRunServiceGetDagRunQueryResult< + TData = DagRunServiceGetDagRunDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useDagRunServiceGetDagRunKey = "DagRunServiceGetDagRun"; +export const UseDagRunServiceGetDagRunKeyFn = ( + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, + queryKey?: Array, +) => [useDagRunServiceGetDagRunKey, ...(queryKey ?? [{ dagId, dagRunId }])]; +export type DagSourceServiceGetDagSourceDefaultResponse = Awaited< + ReturnType +>; +export type DagSourceServiceGetDagSourceQueryResult< + TData = DagSourceServiceGetDagSourceDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useDagSourceServiceGetDagSourceKey = + "DagSourceServiceGetDagSource"; +export const UseDagSourceServiceGetDagSourceKeyFn = ( + { + accept, + fileToken, + }: { + accept?: string; + fileToken: string; + }, + queryKey?: Array, +) => [ + useDagSourceServiceGetDagSourceKey, + ...(queryKey ?? 
[{ accept, fileToken }]), +]; export type EventLogServiceGetEventLogDefaultResponse = Awaited< ReturnType >; @@ -529,14 +572,23 @@ export const UseVersionServiceGetVersionKeyFn = (queryKey?: Array) => [ useVersionServiceGetVersionKey, ...(queryKey ?? []), ]; +export type BackfillServiceCreateBackfillMutationResult = Awaited< + ReturnType +>; export type PoolServicePostPoolMutationResult = Awaited< ReturnType >; export type VariableServicePostVariableMutationResult = Awaited< ReturnType >; -export type DagRunServicePatchDagRunStateMutationResult = Awaited< - ReturnType +export type BackfillServicePauseBackfillMutationResult = Awaited< + ReturnType +>; +export type BackfillServiceUnpauseBackfillMutationResult = Awaited< + ReturnType +>; +export type BackfillServiceCancelBackfillMutationResult = Awaited< + ReturnType >; export type DagServicePatchDagsMutationResult = Awaited< ReturnType @@ -544,21 +596,24 @@ export type DagServicePatchDagsMutationResult = Awaited< export type DagServicePatchDagMutationResult = Awaited< ReturnType >; +export type DagRunServicePatchDagRunStateMutationResult = Awaited< + ReturnType +>; export type PoolServicePatchPoolMutationResult = Awaited< ReturnType >; export type VariableServicePatchVariableMutationResult = Awaited< ReturnType >; +export type DagServiceDeleteDagMutationResult = Awaited< + ReturnType +>; export type ConnectionServiceDeleteConnectionMutationResult = Awaited< ReturnType >; export type DagRunServiceDeleteDagRunMutationResult = Awaited< ReturnType >; -export type DagServiceDeleteDagMutationResult = Awaited< - ReturnType ->; export type PoolServiceDeletePoolMutationResult = Awaited< ReturnType >; diff --git a/airflow/ui/openapi-gen/queries/prefetch.ts b/airflow/ui/openapi-gen/queries/prefetch.ts index a0f3a75eb22f..63e8d4b43132 100644 --- a/airflow/ui/openapi-gen/queries/prefetch.ts +++ b/airflow/ui/openapi-gen/queries/prefetch.ts @@ -3,6 +3,7 @@ import { type QueryClient } from "@tanstack/react-query"; import { AssetService, + BackfillService, ConnectionService, DagRunService, DagService, @@ -138,102 +139,57 @@ export const prefetchUseDagsServiceRecentDagRuns = ( }), }); /** - * Get Connection - * Get a connection entry. - * @param data The data for the request. - * @param data.connectionId - * @returns ConnectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseConnectionServiceGetConnection = ( - queryClient: QueryClient, - { - connectionId, - }: { - connectionId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }), - queryFn: () => ConnectionService.getConnection({ connectionId }), - }); -/** - * Get Connections - * Get all connection entries. + * List Backfills * @param data The data for the request. 
+ * @param data.dagId * @param data.limit * @param data.offset * @param data.orderBy - * @returns ConnectionCollectionResponse Successful Response + * @returns unknown Successful Response * @throws ApiError */ -export const prefetchUseConnectionServiceGetConnections = ( +export const prefetchUseBackfillServiceListBackfills = ( queryClient: QueryClient, { + dagId, limit, offset, orderBy, }: { + dagId: string; limit?: number; offset?: number; orderBy?: string; - } = {}, + }, ) => queryClient.prefetchQuery({ - queryKey: Common.UseConnectionServiceGetConnectionsKeyFn({ + queryKey: Common.UseBackfillServiceListBackfillsKeyFn({ + dagId, limit, offset, orderBy, }), - queryFn: () => ConnectionService.getConnections({ limit, offset, orderBy }), - }); -/** - * Get Dag Run - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @returns DAGRunResponse Successful Response - * @throws ApiError - */ -export const prefetchUseDagRunServiceGetDagRun = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - }: { - dagId: string; - dagRunId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }), - queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }), + queryFn: () => + BackfillService.listBackfills({ dagId, limit, offset, orderBy }), }); /** - * Get Dag Source - * Get source code using file token. + * Get Backfill * @param data The data for the request. - * @param data.fileToken - * @param data.accept - * @returns DAGSourceResponse Successful Response + * @param data.backfillId + * @returns unknown Successful Response * @throws ApiError */ -export const prefetchUseDagSourceServiceGetDagSource = ( +export const prefetchUseBackfillServiceGetBackfill = ( queryClient: QueryClient, { - accept, - fileToken, + backfillId, }: { - accept?: string; - fileToken: string; + backfillId: string; }, ) => queryClient.prefetchQuery({ - queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ - accept, - fileToken, - }), - queryFn: () => DagSourceService.getDagSource({ accept, fileToken }), + queryKey: Common.UseBackfillServiceGetBackfillKeyFn({ backfillId }), + queryFn: () => BackfillService.getBackfill({ backfillId }), }); /** * Get Dags @@ -380,6 +336,104 @@ export const prefetchUseDagServiceGetDagDetails = ( queryKey: Common.UseDagServiceGetDagDetailsKeyFn({ dagId }), queryFn: () => DagService.getDagDetails({ dagId }), }); +/** + * Get Connection + * Get a connection entry. + * @param data The data for the request. + * @param data.connectionId + * @returns ConnectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseConnectionServiceGetConnection = ( + queryClient: QueryClient, + { + connectionId, + }: { + connectionId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }), + queryFn: () => ConnectionService.getConnection({ connectionId }), + }); +/** + * Get Connections + * Get all connection entries. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns ConnectionCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseConnectionServiceGetConnections = ( + queryClient: QueryClient, + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseConnectionServiceGetConnectionsKeyFn({ + limit, + offset, + orderBy, + }), + queryFn: () => ConnectionService.getConnections({ limit, offset, orderBy }), + }); +/** + * Get Dag Run + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagRunServiceGetDagRun = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }), + queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }), + }); +/** + * Get Dag Source + * Get source code using file token. + * @param data The data for the request. + * @param data.fileToken + * @param data.accept + * @returns DAGSourceResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagSourceServiceGetDagSource = ( + queryClient: QueryClient, + { + accept, + fileToken, + }: { + accept?: string; + fileToken: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ + accept, + fileToken, + }), + queryFn: () => DagSourceService.getDagSource({ accept, fileToken }), + }); /** * Get Event Log * @param data The data for the request. diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts index 8ffcee3defb2..5f8f649372db 100644 --- a/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow/ui/openapi-gen/queries/queries.ts @@ -8,6 +8,7 @@ import { import { AssetService, + BackfillService, ConnectionService, DagRunService, DagService, @@ -25,6 +26,7 @@ import { VersionService, } from "../requests/services.gen"; import { + BackfillPostBody, DAGPatchBody, DAGRunPatchBody, DagRunState, @@ -173,132 +175,69 @@ export const useDagsServiceRecentDagRuns = < ...options, }); /** - * Get Connection - * Get a connection entry. - * @param data The data for the request. - * @param data.connectionId - * @returns ConnectionResponse Successful Response - * @throws ApiError - */ -export const useConnectionServiceGetConnection = < - TData = Common.ConnectionServiceGetConnectionDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - connectionId, - }: { - connectionId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseConnectionServiceGetConnectionKeyFn( - { connectionId }, - queryKey, - ), - queryFn: () => ConnectionService.getConnection({ connectionId }) as TData, - ...options, - }); -/** - * Get Connections - * Get all connection entries. + * List Backfills * @param data The data for the request. 
+ * @param data.dagId * @param data.limit * @param data.offset * @param data.orderBy - * @returns ConnectionCollectionResponse Successful Response + * @returns unknown Successful Response * @throws ApiError */ -export const useConnectionServiceGetConnections = < - TData = Common.ConnectionServiceGetConnectionsDefaultResponse, +export const useBackfillServiceListBackfills = < + TData = Common.BackfillServiceListBackfillsDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( { + dagId, limit, offset, orderBy, }: { + dagId: string; limit?: number; offset?: number; orderBy?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseConnectionServiceGetConnectionsKeyFn( - { limit, offset, orderBy }, - queryKey, - ), - queryFn: () => - ConnectionService.getConnections({ limit, offset, orderBy }) as TData, - ...options, - }); -/** - * Get Dag Run - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @returns DAGRunResponse Successful Response - * @throws ApiError - */ -export const useDagRunServiceGetDagRun = < - TData = Common.DagRunServiceGetDagRunDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - }: { - dagId: string; - dagRunId: string; }, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">, ) => useQuery({ - queryKey: Common.UseDagRunServiceGetDagRunKeyFn( - { dagId, dagRunId }, + queryKey: Common.UseBackfillServiceListBackfillsKeyFn( + { dagId, limit, offset, orderBy }, queryKey, ), - queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, + queryFn: () => + BackfillService.listBackfills({ dagId, limit, offset, orderBy }) as TData, ...options, }); /** - * Get Dag Source - * Get source code using file token. + * Get Backfill * @param data The data for the request. - * @param data.fileToken - * @param data.accept - * @returns DAGSourceResponse Successful Response + * @param data.backfillId + * @returns unknown Successful Response * @throws ApiError */ -export const useDagSourceServiceGetDagSource = < - TData = Common.DagSourceServiceGetDagSourceDefaultResponse, +export const useBackfillServiceGetBackfill = < + TData = Common.BackfillServiceGetBackfillDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( { - accept, - fileToken, + backfillId, }: { - accept?: string; - fileToken: string; + backfillId: string; }, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">, ) => useQuery({ - queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn( - { accept, fileToken }, + queryKey: Common.UseBackfillServiceGetBackfillKeyFn( + { backfillId }, queryKey, ), - queryFn: () => - DagSourceService.getDagSource({ accept, fileToken }) as TData, + queryFn: () => BackfillService.getBackfill({ backfillId }) as TData, ...options, }); /** @@ -476,6 +415,135 @@ export const useDagServiceGetDagDetails = < queryFn: () => DagService.getDagDetails({ dagId }) as TData, ...options, }); +/** + * Get Connection + * Get a connection entry. + * @param data The data for the request. 
+ * @param data.connectionId + * @returns ConnectionResponse Successful Response + * @throws ApiError + */ +export const useConnectionServiceGetConnection = < + TData = Common.ConnectionServiceGetConnectionDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + connectionId, + }: { + connectionId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseConnectionServiceGetConnectionKeyFn( + { connectionId }, + queryKey, + ), + queryFn: () => ConnectionService.getConnection({ connectionId }) as TData, + ...options, + }); +/** + * Get Connections + * Get all connection entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns ConnectionCollectionResponse Successful Response + * @throws ApiError + */ +export const useConnectionServiceGetConnections = < + TData = Common.ConnectionServiceGetConnectionsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseConnectionServiceGetConnectionsKeyFn( + { limit, offset, orderBy }, + queryKey, + ), + queryFn: () => + ConnectionService.getConnections({ limit, offset, orderBy }) as TData, + ...options, + }); +/** + * Get Dag Run + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ +export const useDagRunServiceGetDagRun = < + TData = Common.DagRunServiceGetDagRunDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagRunServiceGetDagRunKeyFn( + { dagId, dagRunId }, + queryKey, + ), + queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, + ...options, + }); +/** + * Get Dag Source + * Get source code using file token. + * @param data The data for the request. + * @param data.fileToken + * @param data.accept + * @returns DAGSourceResponse Successful Response + * @throws ApiError + */ +export const useDagSourceServiceGetDagSource = < + TData = Common.DagSourceServiceGetDagSourceDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + accept, + fileToken, + }: { + accept?: string; + fileToken: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn( + { accept, fileToken }, + queryKey, + ), + queryFn: () => + DagSourceService.getDagSource({ accept, fileToken }) as TData, + ...options, + }); /** * Get Event Log * @param data The data for the request. @@ -858,6 +926,44 @@ export const useVersionServiceGetVersion = < queryFn: () => VersionService.getVersion() as TData, ...options, }); +/** + * Create Backfill + * @param data The data for the request. 
+ * @param data.requestBody + * @returns unknown Successful Response + * @throws ApiError + */ +export const useBackfillServiceCreateBackfill = < + TData = Common.BackfillServiceCreateBackfillMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: BackfillPostBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: BackfillPostBody; + }, + TContext + >({ + mutationFn: ({ requestBody }) => + BackfillService.createBackfill({ + requestBody, + }) as unknown as Promise, + ...options, + }); /** * Post Pool * Create a Pool. @@ -935,18 +1041,14 @@ export const useVariableServicePostVariable = < ...options, }); /** - * Patch Dag Run State - * Modify a DAG Run. + * Pause Backfill * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.requestBody - * @param data.updateMask - * @returns DAGRunResponse Successful Response + * @param data.backfillId + * @returns unknown Successful Response * @throws ApiError */ -export const useDagRunServicePatchDagRunState = < - TData = Common.DagRunServicePatchDagRunStateMutationResult, +export const useBackfillServicePauseBackfill = < + TData = Common.BackfillServicePauseBackfillMutationResult, TError = unknown, TContext = unknown, >( @@ -955,10 +1057,7 @@ export const useDagRunServicePatchDagRunState = < TData, TError, { - dagId: string; - dagRunId: string; - requestBody: DAGRunPatchBody; - updateMask?: string[]; + backfillId: unknown; }, TContext >, @@ -969,19 +1068,89 @@ export const useDagRunServicePatchDagRunState = < TData, TError, { - dagId: string; - dagRunId: string; - requestBody: DAGRunPatchBody; - updateMask?: string[]; + backfillId: unknown; }, TContext >({ - mutationFn: ({ dagId, dagRunId, requestBody, updateMask }) => - DagRunService.patchDagRunState({ - dagId, - dagRunId, - requestBody, - updateMask, + mutationFn: ({ backfillId }) => + BackfillService.pauseBackfill({ + backfillId, + }) as unknown as Promise, + ...options, + }); +/** + * Unpause Backfill + * @param data The data for the request. + * @param data.backfillId + * @returns unknown Successful Response + * @throws ApiError + */ +export const useBackfillServiceUnpauseBackfill = < + TData = Common.BackfillServiceUnpauseBackfillMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + backfillId: unknown; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + backfillId: unknown; + }, + TContext + >({ + mutationFn: ({ backfillId }) => + BackfillService.unpauseBackfill({ + backfillId, + }) as unknown as Promise, + ...options, + }); +/** + * Cancel Backfill + * @param data The data for the request. 
+ * @param data.backfillId + * @returns unknown Successful Response + * @throws ApiError + */ +export const useBackfillServiceCancelBackfill = < + TData = Common.BackfillServiceCancelBackfillMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + backfillId: unknown; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + backfillId: unknown; + }, + TContext + >({ + mutationFn: ({ backfillId }) => + BackfillService.cancelBackfill({ + backfillId, }) as unknown as Promise, ...options, }); @@ -1118,6 +1287,57 @@ export const useDagServicePatchDag = < }) as unknown as Promise, ...options, }); +/** + * Patch Dag Run State + * Modify a DAG Run. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.requestBody + * @param data.updateMask + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ +export const useDagRunServicePatchDagRunState = < + TData = Common.DagRunServicePatchDagRunStateMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + dagRunId: string; + requestBody: DAGRunPatchBody; + updateMask?: string[]; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + dagRunId: string; + requestBody: DAGRunPatchBody; + updateMask?: string[]; + }, + TContext + >({ + mutationFn: ({ dagId, dagRunId, requestBody, updateMask }) => + DagRunService.patchDagRunState({ + dagId, + dagRunId, + requestBody, + updateMask, + }) as unknown as Promise, + ...options, + }); /** * Patch Pool * Update a Pool. @@ -1213,15 +1433,15 @@ export const useVariableServicePatchVariable = < ...options, }); /** - * Delete Connection - * Delete a connection entry. + * Delete Dag + * Delete the specific DAG. * @param data The data for the request. - * @param data.connectionId - * @returns void Successful Response + * @param data.dagId + * @returns unknown Successful Response * @throws ApiError */ -export const useConnectionServiceDeleteConnection = < - TData = Common.ConnectionServiceDeleteConnectionMutationResult, +export const useDagServiceDeleteDag = < + TData = Common.DagServiceDeleteDagMutationResult, TError = unknown, TContext = unknown, >( @@ -1230,7 +1450,7 @@ export const useConnectionServiceDeleteConnection = < TData, TError, { - connectionId: string; + dagId: string; }, TContext >, @@ -1241,27 +1461,24 @@ export const useConnectionServiceDeleteConnection = < TData, TError, { - connectionId: string; + dagId: string; }, TContext >({ - mutationFn: ({ connectionId }) => - ConnectionService.deleteConnection({ - connectionId, - }) as unknown as Promise, + mutationFn: ({ dagId }) => + DagService.deleteDag({ dagId }) as unknown as Promise, ...options, }); /** - * Delete Dag Run - * Delete a DAG Run entry. + * Delete Connection + * Delete a connection entry. * @param data The data for the request. 
- * @param data.dagId - * @param data.dagRunId + * @param data.connectionId * @returns void Successful Response * @throws ApiError */ -export const useDagRunServiceDeleteDagRun = < - TData = Common.DagRunServiceDeleteDagRunMutationResult, +export const useConnectionServiceDeleteConnection = < + TData = Common.ConnectionServiceDeleteConnectionMutationResult, TError = unknown, TContext = unknown, >( @@ -1270,8 +1487,7 @@ export const useDagRunServiceDeleteDagRun = < TData, TError, { - dagId: string; - dagRunId: string; + connectionId: string; }, TContext >, @@ -1282,28 +1498,27 @@ export const useDagRunServiceDeleteDagRun = < TData, TError, { - dagId: string; - dagRunId: string; + connectionId: string; }, TContext >({ - mutationFn: ({ dagId, dagRunId }) => - DagRunService.deleteDagRun({ - dagId, - dagRunId, + mutationFn: ({ connectionId }) => + ConnectionService.deleteConnection({ + connectionId, }) as unknown as Promise, ...options, }); /** - * Delete Dag - * Delete the specific DAG. + * Delete Dag Run + * Delete a DAG Run entry. * @param data The data for the request. * @param data.dagId - * @returns unknown Successful Response + * @param data.dagRunId + * @returns void Successful Response * @throws ApiError */ -export const useDagServiceDeleteDag = < - TData = Common.DagServiceDeleteDagMutationResult, +export const useDagRunServiceDeleteDagRun = < + TData = Common.DagRunServiceDeleteDagRunMutationResult, TError = unknown, TContext = unknown, >( @@ -1313,6 +1528,7 @@ export const useDagServiceDeleteDag = < TError, { dagId: string; + dagRunId: string; }, TContext >, @@ -1324,11 +1540,15 @@ export const useDagServiceDeleteDag = < TError, { dagId: string; + dagRunId: string; }, TContext >({ - mutationFn: ({ dagId }) => - DagService.deleteDag({ dagId }) as unknown as Promise, + mutationFn: ({ dagId, dagRunId }) => + DagRunService.deleteDagRun({ + dagId, + dagRunId, + }) as unknown as Promise, ...options, }); /** diff --git a/airflow/ui/openapi-gen/queries/suspense.ts b/airflow/ui/openapi-gen/queries/suspense.ts index 6ceed83349de..1222b7f5536c 100644 --- a/airflow/ui/openapi-gen/queries/suspense.ts +++ b/airflow/ui/openapi-gen/queries/suspense.ts @@ -3,6 +3,7 @@ import { UseQueryOptions, useSuspenseQuery } from "@tanstack/react-query"; import { AssetService, + BackfillService, ConnectionService, DagRunService, DagService, @@ -160,132 +161,69 @@ export const useDagsServiceRecentDagRunsSuspense = < ...options, }); /** - * Get Connection - * Get a connection entry. - * @param data The data for the request. - * @param data.connectionId - * @returns ConnectionResponse Successful Response - * @throws ApiError - */ -export const useConnectionServiceGetConnectionSuspense = < - TData = Common.ConnectionServiceGetConnectionDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - connectionId, - }: { - connectionId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseConnectionServiceGetConnectionKeyFn( - { connectionId }, - queryKey, - ), - queryFn: () => ConnectionService.getConnection({ connectionId }) as TData, - ...options, - }); -/** - * Get Connections - * Get all connection entries. + * List Backfills * @param data The data for the request. 
+ * @param data.dagId * @param data.limit * @param data.offset * @param data.orderBy - * @returns ConnectionCollectionResponse Successful Response + * @returns unknown Successful Response * @throws ApiError */ -export const useConnectionServiceGetConnectionsSuspense = < - TData = Common.ConnectionServiceGetConnectionsDefaultResponse, +export const useBackfillServiceListBackfillsSuspense = < + TData = Common.BackfillServiceListBackfillsDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( { + dagId, limit, offset, orderBy, }: { + dagId: string; limit?: number; offset?: number; orderBy?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseConnectionServiceGetConnectionsKeyFn( - { limit, offset, orderBy }, - queryKey, - ), - queryFn: () => - ConnectionService.getConnections({ limit, offset, orderBy }) as TData, - ...options, - }); -/** - * Get Dag Run - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @returns DAGRunResponse Successful Response - * @throws ApiError - */ -export const useDagRunServiceGetDagRunSuspense = < - TData = Common.DagRunServiceGetDagRunDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - }: { - dagId: string; - dagRunId: string; }, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">, ) => useSuspenseQuery({ - queryKey: Common.UseDagRunServiceGetDagRunKeyFn( - { dagId, dagRunId }, + queryKey: Common.UseBackfillServiceListBackfillsKeyFn( + { dagId, limit, offset, orderBy }, queryKey, ), - queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, + queryFn: () => + BackfillService.listBackfills({ dagId, limit, offset, orderBy }) as TData, ...options, }); /** - * Get Dag Source - * Get source code using file token. + * Get Backfill * @param data The data for the request. - * @param data.fileToken - * @param data.accept - * @returns DAGSourceResponse Successful Response + * @param data.backfillId + * @returns unknown Successful Response * @throws ApiError */ -export const useDagSourceServiceGetDagSourceSuspense = < - TData = Common.DagSourceServiceGetDagSourceDefaultResponse, +export const useBackfillServiceGetBackfillSuspense = < + TData = Common.BackfillServiceGetBackfillDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( { - accept, - fileToken, + backfillId, }: { - accept?: string; - fileToken: string; + backfillId: string; }, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">, ) => useSuspenseQuery({ - queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn( - { accept, fileToken }, + queryKey: Common.UseBackfillServiceGetBackfillKeyFn( + { backfillId }, queryKey, ), - queryFn: () => - DagSourceService.getDagSource({ accept, fileToken }) as TData, + queryFn: () => BackfillService.getBackfill({ backfillId }) as TData, ...options, }); /** @@ -463,6 +401,135 @@ export const useDagServiceGetDagDetailsSuspense = < queryFn: () => DagService.getDagDetails({ dagId }) as TData, ...options, }); +/** + * Get Connection + * Get a connection entry. + * @param data The data for the request. 
+ * @param data.connectionId + * @returns ConnectionResponse Successful Response + * @throws ApiError + */ +export const useConnectionServiceGetConnectionSuspense = < + TData = Common.ConnectionServiceGetConnectionDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + connectionId, + }: { + connectionId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseConnectionServiceGetConnectionKeyFn( + { connectionId }, + queryKey, + ), + queryFn: () => ConnectionService.getConnection({ connectionId }) as TData, + ...options, + }); +/** + * Get Connections + * Get all connection entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns ConnectionCollectionResponse Successful Response + * @throws ApiError + */ +export const useConnectionServiceGetConnectionsSuspense = < + TData = Common.ConnectionServiceGetConnectionsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseConnectionServiceGetConnectionsKeyFn( + { limit, offset, orderBy }, + queryKey, + ), + queryFn: () => + ConnectionService.getConnections({ limit, offset, orderBy }) as TData, + ...options, + }); +/** + * Get Dag Run + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ +export const useDagRunServiceGetDagRunSuspense = < + TData = Common.DagRunServiceGetDagRunDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagRunServiceGetDagRunKeyFn( + { dagId, dagRunId }, + queryKey, + ), + queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, + ...options, + }); +/** + * Get Dag Source + * Get source code using file token. + * @param data The data for the request. + * @param data.fileToken + * @param data.accept + * @returns DAGSourceResponse Successful Response + * @throws ApiError + */ +export const useDagSourceServiceGetDagSourceSuspense = < + TData = Common.DagSourceServiceGetDagSourceDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + accept, + fileToken, + }: { + accept?: string; + fileToken: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn( + { accept, fileToken }, + queryKey, + ), + queryFn: () => + DagSourceService.getDagSource({ accept, fileToken }) as TData, + ...options, + }); /** * Get Event Log * @param data The data for the request. 
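
As a point of reference for the hooks above, here is a minimal sketch of how a React component might consume the newly generated backfill queries and mutations. The hook names and parameter shapes are taken from the generated code in this patch; the "openapi/queries" import alias, the component itself, and the response shape used for narrowing are illustrative assumptions, since the backfill endpoints are still typed as `unknown` at this point.

    import { useQueryClient } from "@tanstack/react-query";
    // Import alias below is an assumption; adjust to however the UI resolves openapi-gen/queries.
    import {
      useBackfillServiceCancelBackfill,
      useBackfillServiceListBackfillsSuspense,
    } from "openapi/queries";

    // Illustrative sketch only: list the backfills of one DAG and let the user cancel one.
    export const BackfillList = ({ dagId }: { dagId: string }) => {
      const queryClient = useQueryClient();

      // Suspense variant: a parent component must provide the <Suspense> boundary.
      const { data } = useBackfillServiceListBackfillsSuspense({ dagId, limit: 25 });
      // The backfill endpoints return `unknown` in this revision, so narrow the payload locally.
      // The field names follow the JSON asserted in the FastAPI tests further down in this patch.
      const backfills =
        (data as { backfills?: Array<{ id: number; is_paused: boolean }> })?.backfills ?? [];

      const cancelBackfill = useBackfillServiceCancelBackfill({
        // Refetch queries after a successful cancel so the list reflects the new state.
        onSuccess: () => queryClient.invalidateQueries(),
      });

      return (
        <ul>
          {backfills.map((b) => (
            <li key={b.id}>
              Backfill #{b.id} {b.is_paused ? "(paused)" : ""}
              <button onClick={() => cancelBackfill.mutate({ backfillId: b.id })}>Cancel</button>
            </li>
          ))}
        </ul>
      );
    };

Once the backfill routes declare proper response models, the generator should emit typed responses for them (as it already does for connections and DAG runs) and the manual narrowing above becomes unnecessary.
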
diff --git a/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow/ui/openapi-gen/requests/schemas.gen.ts index 3f74dc46a8a8..5624358636cb 100644 --- a/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -89,6 +89,48 @@ export const $AppBuilderViewResponse = { description: "Serializer for AppBuilder View responses.", } as const; +export const $BackfillPostBody = { + properties: { + dag_id: { + type: "string", + title: "Dag Id", + }, + from_date: { + type: "string", + format: "date-time", + title: "From Date", + }, + to_date: { + type: "string", + format: "date-time", + title: "To Date", + }, + run_backwards: { + type: "boolean", + title: "Run Backwards", + default: false, + }, + dag_run_conf: { + type: "object", + title: "Dag Run Conf", + default: {}, + }, + reprocess_behavior: { + $ref: "#/components/schemas/ReprocessBehavior", + default: "none", + }, + max_active_runs: { + type: "integer", + title: "Max Active Runs", + default: 10, + }, + }, + type: "object", + required: ["dag_id", "from_date", "to_date"], + title: "BackfillPostBody", + description: "Object used for create backfill request.", +} as const; + export const $BaseInfoSchema = { properties: { status: { @@ -2216,6 +2258,15 @@ export const $ProviderResponse = { description: "Provider serializer for responses.", } as const; +export const $ReprocessBehavior = { + type: "string", + enum: ["failed", "completed", "none"], + title: "ReprocessBehavior", + description: `Internal enum for setting reprocess behavior in a backfill. + +:meta private:`, +} as const; + export const $SchedulerInfoSchema = { properties: { status: { diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts index 6c42a500e974..e82d0c5146fa 100644 --- a/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow/ui/openapi-gen/requests/services.gen.ts @@ -9,20 +9,18 @@ import type { HistoricalMetricsResponse, RecentDagRunsData, RecentDagRunsResponse, - DeleteConnectionData, - DeleteConnectionResponse, - GetConnectionData, - GetConnectionResponse, - GetConnectionsData, - GetConnectionsResponse, - GetDagRunData, - GetDagRunResponse, - DeleteDagRunData, - DeleteDagRunResponse, - PatchDagRunStateData, - PatchDagRunStateResponse, - GetDagSourceData, - GetDagSourceResponse, + ListBackfillsData, + ListBackfillsResponse, + CreateBackfillData, + CreateBackfillResponse, + GetBackfillData, + GetBackfillResponse, + PauseBackfillData, + PauseBackfillResponse, + UnpauseBackfillData, + UnpauseBackfillResponse, + CancelBackfillData, + CancelBackfillResponse, GetDagsData, GetDagsResponse, PatchDagsData, @@ -37,6 +35,20 @@ import type { DeleteDagResponse, GetDagDetailsData, GetDagDetailsResponse, + DeleteConnectionData, + DeleteConnectionResponse, + GetConnectionData, + GetConnectionResponse, + GetConnectionsData, + GetConnectionsResponse, + GetDagRunData, + GetDagRunResponse, + DeleteDagRunData, + DeleteDagRunResponse, + PatchDagRunStateData, + PatchDagRunStateResponse, + GetDagSourceData, + GetDagSourceResponse, GetEventLogData, GetEventLogResponse, GetHealthResponse, @@ -168,108 +180,77 @@ export class DagsService { } } -export class ConnectionService { - /** - * Delete Connection - * Delete a connection entry. - * @param data The data for the request. 
- * @param data.connectionId - * @returns void Successful Response - * @throws ApiError - */ - public static deleteConnection( - data: DeleteConnectionData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "DELETE", - url: "/public/connections/{connection_id}", - path: { - connection_id: data.connectionId, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - +export class BackfillService { /** - * Get Connection - * Get a connection entry. + * List Backfills * @param data The data for the request. - * @param data.connectionId - * @returns ConnectionResponse Successful Response + * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns unknown Successful Response * @throws ApiError */ - public static getConnection( - data: GetConnectionData, - ): CancelablePromise { + public static listBackfills( + data: ListBackfillsData, + ): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/connections/{connection_id}", - path: { - connection_id: data.connectionId, + url: "/public/backfills/", + query: { + dag_id: data.dagId, + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, }, errors: { 401: "Unauthorized", 403: "Forbidden", - 404: "Not Found", 422: "Validation Error", }, }); } /** - * Get Connections - * Get all connection entries. + * Create Backfill * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns ConnectionCollectionResponse Successful Response + * @param data.requestBody + * @returns unknown Successful Response * @throws ApiError */ - public static getConnections( - data: GetConnectionsData = {}, - ): CancelablePromise { + public static createBackfill( + data: CreateBackfillData, + ): CancelablePromise { return __request(OpenAPI, { - method: "GET", - url: "/public/connections/", - query: { - limit: data.limit, - offset: data.offset, - order_by: data.orderBy, - }, + method: "POST", + url: "/public/backfills/", + body: data.requestBody, + mediaType: "application/json", errors: { 401: "Unauthorized", 403: "Forbidden", 404: "Not Found", + 409: "Conflict", 422: "Validation Error", }, }); } -} -export class DagRunService { /** - * Get Dag Run + * Get Backfill * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @returns DAGRunResponse Successful Response + * @param data.backfillId + * @returns unknown Successful Response * @throws ApiError */ - public static getDagRun( - data: GetDagRunData, - ): CancelablePromise { + public static getBackfill( + data: GetBackfillData, + ): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}", + url: "/public/backfills/{backfill_id}", path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, + backfill_id: data.backfillId, }, errors: { 401: "Unauthorized", @@ -281,99 +262,78 @@ export class DagRunService { } /** - * Delete Dag Run - * Delete a DAG Run entry. + * Pause Backfill * @param data The data for the request. 
- * @param data.dagId - * @param data.dagRunId - * @returns void Successful Response + * @param data.backfillId + * @returns unknown Successful Response * @throws ApiError */ - public static deleteDagRun( - data: DeleteDagRunData, - ): CancelablePromise { + public static pauseBackfill( + data: PauseBackfillData, + ): CancelablePromise { return __request(OpenAPI, { - method: "DELETE", - url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}", + method: "PUT", + url: "/public/backfills/{backfill_id}/pause", path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, + backfill_id: data.backfillId, }, errors: { - 400: "Bad Request", 401: "Unauthorized", 403: "Forbidden", 404: "Not Found", + 409: "Conflict", 422: "Validation Error", }, }); } /** - * Patch Dag Run State - * Modify a DAG Run. + * Unpause Backfill * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.requestBody - * @param data.updateMask - * @returns DAGRunResponse Successful Response + * @param data.backfillId + * @returns unknown Successful Response * @throws ApiError */ - public static patchDagRunState( - data: PatchDagRunStateData, - ): CancelablePromise { + public static unpauseBackfill( + data: UnpauseBackfillData, + ): CancelablePromise { return __request(OpenAPI, { - method: "PATCH", - url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}", + method: "PUT", + url: "/public/backfills/{backfill_id}/unpause", path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - }, - query: { - update_mask: data.updateMask, + backfill_id: data.backfillId, }, - body: data.requestBody, - mediaType: "application/json", errors: { - 400: "Bad Request", 401: "Unauthorized", 403: "Forbidden", 404: "Not Found", + 409: "Conflict", 422: "Validation Error", }, }); } -} -export class DagSourceService { /** - * Get Dag Source - * Get source code using file token. + * Cancel Backfill * @param data The data for the request. - * @param data.fileToken - * @param data.accept - * @returns DAGSourceResponse Successful Response + * @param data.backfillId + * @returns unknown Successful Response * @throws ApiError */ - public static getDagSource( - data: GetDagSourceData, - ): CancelablePromise { + public static cancelBackfill( + data: CancelBackfillData, + ): CancelablePromise { return __request(OpenAPI, { - method: "GET", - url: "/public/dagSources/{file_token}", + method: "PUT", + url: "/public/backfills/{backfill_id}/cancel", path: { - file_token: data.fileToken, - }, - headers: { - accept: data.accept, + backfill_id: data.backfillId, }, errors: { - 400: "Bad Request", 401: "Unauthorized", 403: "Forbidden", 404: "Not Found", - 406: "Not Acceptable", + 409: "Conflict", 422: "Validation Error", }, }); @@ -613,6 +573,218 @@ export class DagService { } } +export class ConnectionService { + /** + * Delete Connection + * Delete a connection entry. + * @param data The data for the request. + * @param data.connectionId + * @returns void Successful Response + * @throws ApiError + */ + public static deleteConnection( + data: DeleteConnectionData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "DELETE", + url: "/public/connections/{connection_id}", + path: { + connection_id: data.connectionId, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } + + /** + * Get Connection + * Get a connection entry. + * @param data The data for the request. 
+ * @param data.connectionId + * @returns ConnectionResponse Successful Response + * @throws ApiError + */ + public static getConnection( + data: GetConnectionData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/connections/{connection_id}", + path: { + connection_id: data.connectionId, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } + + /** + * Get Connections + * Get all connection entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns ConnectionCollectionResponse Successful Response + * @throws ApiError + */ + public static getConnections( + data: GetConnectionsData = {}, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/connections/", + query: { + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } +} + +export class DagRunService { + /** + * Get Dag Run + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ + public static getDagRun( + data: GetDagRunData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}", + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } + + /** + * Delete Dag Run + * Delete a DAG Run entry. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @returns void Successful Response + * @throws ApiError + */ + public static deleteDagRun( + data: DeleteDagRunData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "DELETE", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}", + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + }, + errors: { + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } + + /** + * Patch Dag Run State + * Modify a DAG Run. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.requestBody + * @param data.updateMask + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ + public static patchDagRunState( + data: PatchDagRunStateData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "PATCH", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}", + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + }, + query: { + update_mask: data.updateMask, + }, + body: data.requestBody, + mediaType: "application/json", + errors: { + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } +} + +export class DagSourceService { + /** + * Get Dag Source + * Get source code using file token. + * @param data The data for the request. 
+ * @param data.fileToken + * @param data.accept + * @returns DAGSourceResponse Successful Response + * @throws ApiError + */ + public static getDagSource( + data: GetDagSourceData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/dagSources/{file_token}", + path: { + file_token: data.fileToken, + }, + headers: { + accept: data.accept, + }, + errors: { + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 406: "Not Acceptable", + 422: "Validation Error", + }, + }); + } +} + export class EventLogService { /** * Get Event Log diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index 4a06652e3802..2c11dd03cc0a 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -21,6 +21,21 @@ export type AppBuilderViewResponse = { [key: string]: unknown; }; +/** + * Object used for create backfill request. + */ +export type BackfillPostBody = { + dag_id: string; + from_date: string; + to_date: string; + run_backwards?: boolean; + dag_run_conf?: { + [key: string]: unknown; + }; + reprocess_behavior?: ReprocessBehavior; + max_active_runs?: number; +}; + /** * Base status field for metadatabase and scheduler. */ @@ -502,6 +517,13 @@ export type ProviderResponse = { version: string; }; +/** + * Internal enum for setting reprocess behavior in a backfill. + * + * :meta private: + */ +export type ReprocessBehavior = "failed" | "completed" | "none"; + /** * Schema for Scheduler info. */ @@ -674,55 +696,44 @@ export type RecentDagRunsData = { export type RecentDagRunsResponse = DAGWithLatestDagRunsCollectionResponse; -export type DeleteConnectionData = { - connectionId: string; -}; - -export type DeleteConnectionResponse = void; - -export type GetConnectionData = { - connectionId: string; -}; - -export type GetConnectionResponse = ConnectionResponse; - -export type GetConnectionsData = { +export type ListBackfillsData = { + dagId: string; limit?: number; offset?: number; orderBy?: string; }; -export type GetConnectionsResponse = ConnectionCollectionResponse; +export type ListBackfillsResponse = unknown; -export type GetDagRunData = { - dagId: string; - dagRunId: string; +export type CreateBackfillData = { + requestBody: BackfillPostBody; }; -export type GetDagRunResponse = DAGRunResponse; +export type CreateBackfillResponse = unknown; -export type DeleteDagRunData = { - dagId: string; - dagRunId: string; +export type GetBackfillData = { + backfillId: string; }; -export type DeleteDagRunResponse = void; +export type GetBackfillResponse = unknown; -export type PatchDagRunStateData = { - dagId: string; - dagRunId: string; - requestBody: DAGRunPatchBody; - updateMask?: Array | null; +export type PauseBackfillData = { + backfillId: unknown; }; -export type PatchDagRunStateResponse = DAGRunResponse; +export type PauseBackfillResponse = unknown; -export type GetDagSourceData = { - accept?: string; - fileToken: string; +export type UnpauseBackfillData = { + backfillId: unknown; }; -export type GetDagSourceResponse = DAGSourceResponse; +export type UnpauseBackfillResponse = unknown; + +export type CancelBackfillData = { + backfillId: unknown; +}; + +export type CancelBackfillResponse = unknown; export type GetDagsData = { dagDisplayNamePattern?: string | null; @@ -789,6 +800,56 @@ export type GetDagDetailsData = { export type GetDagDetailsResponse = DAGDetailsResponse; +export type DeleteConnectionData = { + connectionId: string; +}; + +export 
type DeleteConnectionResponse = void; + +export type GetConnectionData = { + connectionId: string; +}; + +export type GetConnectionResponse = ConnectionResponse; + +export type GetConnectionsData = { + limit?: number; + offset?: number; + orderBy?: string; +}; + +export type GetConnectionsResponse = ConnectionCollectionResponse; + +export type GetDagRunData = { + dagId: string; + dagRunId: string; +}; + +export type GetDagRunResponse = DAGRunResponse; + +export type DeleteDagRunData = { + dagId: string; + dagRunId: string; +}; + +export type DeleteDagRunResponse = void; + +export type PatchDagRunStateData = { + dagId: string; + dagRunId: string; + requestBody: DAGRunPatchBody; + updateMask?: Array | null; +}; + +export type PatchDagRunStateResponse = DAGRunResponse; + +export type GetDagSourceData = { + accept?: string; + fileToken: string; +}; + +export type GetDagSourceResponse = DAGSourceResponse; + export type GetEventLogData = { eventLogId: number; }; @@ -960,14 +1021,14 @@ export type $OpenApiTs = { }; }; }; - "/public/connections/{connection_id}": { - delete: { - req: DeleteConnectionData; + "/public/backfills/": { + get: { + req: ListBackfillsData; res: { /** * Successful Response */ - 204: void; + 200: unknown; /** * Unauthorized */ @@ -976,23 +1037,19 @@ export type $OpenApiTs = { * Forbidden */ 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; /** * Validation Error */ 422: HTTPValidationError; }; }; - get: { - req: GetConnectionData; + post: { + req: CreateBackfillData; res: { /** * Successful Response */ - 200: ConnectionResponse; + 200: unknown; /** * Unauthorized */ @@ -1005,6 +1062,10 @@ export type $OpenApiTs = { * Not Found */ 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; /** * Validation Error */ @@ -1012,14 +1073,14 @@ export type $OpenApiTs = { }; }; }; - "/public/connections/": { + "/public/backfills/{backfill_id}": { get: { - req: GetConnectionsData; + req: GetBackfillData; res: { /** * Successful Response */ - 200: ConnectionCollectionResponse; + 200: unknown; /** * Unauthorized */ @@ -1039,14 +1100,14 @@ export type $OpenApiTs = { }; }; }; - "/public/dags/{dag_id}/dagRuns/{dag_run_id}": { - get: { - req: GetDagRunData; + "/public/backfills/{backfill_id}/pause": { + put: { + req: PauseBackfillData; res: { /** * Successful Response */ - 200: DAGRunResponse; + 200: unknown; /** * Unauthorized */ @@ -1060,51 +1121,24 @@ export type $OpenApiTs = { */ 404: HTTPExceptionResponse; /** - * Validation Error + * Conflict */ - 422: HTTPValidationError; - }; - }; - delete: { - req: DeleteDagRunData; - res: { - /** - * Successful Response - */ - 204: void; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; + 409: HTTPExceptionResponse; /** * Validation Error */ 422: HTTPValidationError; }; }; - patch: { - req: PatchDagRunStateData; + }; + "/public/backfills/{backfill_id}/unpause": { + put: { + req: UnpauseBackfillData; res: { /** * Successful Response */ - 200: DAGRunResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; + 200: unknown; /** * Unauthorized */ @@ -1117,6 +1151,10 @@ export type $OpenApiTs = { * Not Found */ 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; /** * Validation Error */ @@ -1124,18 +1162,14 @@ export type $OpenApiTs = { }; }; }; - "/public/dagSources/{file_token}": { - 
get: { - req: GetDagSourceData; + "/public/backfills/{backfill_id}/cancel": { + put: { + req: CancelBackfillData; res: { /** * Successful Response */ - 200: DAGSourceResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; + 200: unknown; /** * Unauthorized */ @@ -1149,9 +1183,9 @@ export type $OpenApiTs = { */ 404: HTTPExceptionResponse; /** - * Not Acceptable + * Conflict */ - 406: HTTPExceptionResponse; + 409: HTTPExceptionResponse; /** * Validation Error */ @@ -1346,6 +1380,205 @@ export type $OpenApiTs = { }; }; }; + "/public/connections/{connection_id}": { + delete: { + req: DeleteConnectionData; + res: { + /** + * Successful Response + */ + 204: void; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + get: { + req: GetConnectionData; + res: { + /** + * Successful Response + */ + 200: ConnectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/connections/": { + get: { + req: GetConnectionsData; + res: { + /** + * Successful Response + */ + 200: ConnectionCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/dags/{dag_id}/dagRuns/{dag_run_id}": { + get: { + req: GetDagRunData; + res: { + /** + * Successful Response + */ + 200: DAGRunResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + delete: { + req: DeleteDagRunData; + res: { + /** + * Successful Response + */ + 204: void; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + patch: { + req: PatchDagRunStateData; + res: { + /** + * Successful Response + */ + 200: DAGRunResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/dagSources/{file_token}": { + get: { + req: GetDagSourceData; + res: { + /** + * Successful Response + */ + 200: DAGSourceResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Not Acceptable + */ + 406: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; "/public/eventLogs/{event_log_id}": { get: { req: GetEventLogData; diff --git a/airflow/www/static/js/types/api-generated.ts b/airflow/www/static/js/types/api-generated.ts index 616f6fbfa290..7526c340b29f 100644 --- 
a/airflow/www/static/js/types/api-generated.ts +++ b/airflow/www/static/js/types/api-generated.ts @@ -6,50 +6,6 @@ import type { CamelCasedPropertiesDeep } from "type-fest"; */ export interface paths { - "/backfills": { - get: operations["list_backfills"]; - post: operations["create_backfill"]; - }; - "/backfills/{backfill_id}": { - get: operations["get_backfill"]; - parameters: { - path: { - /** The integer id identifying the backfill entity. */ - backfill_id: components["parameters"]["BackfillIdPath"]; - }; - }; - }; - "/backfills/{backfill_id}/pause": { - post: operations["pause_backfill"]; - parameters: { - path: { - /** The integer id identifying the backfill entity. */ - backfill_id: components["parameters"]["BackfillIdPath"]; - }; - }; - }; - "/backfills/{backfill_id}/unpause": { - post: operations["unpause_backfill"]; - parameters: { - path: { - /** The integer id identifying the backfill entity. */ - backfill_id: components["parameters"]["BackfillIdPath"]; - }; - }; - }; - "/backfills/{backfill_id}/cancel": { - /** - * When a backfill is cancelled, all queued dag runs will be marked as failed. - * Running dag runs will be allowed to continue. - */ - post: operations["cancel_backfill"]; - parameters: { - path: { - /** The integer id identifying the backfill entity. */ - backfill_id: components["parameters"]["BackfillIdPath"]; - }; - }; - }; "/connections": { get: operations["get_connections"]; post: operations["post_connection"]; @@ -2787,125 +2743,6 @@ export interface components { } export interface operations { - list_backfills: { - parameters: { - query: { - /** List backfills for this dag. */ - dag_id: string; - }; - }; - responses: { - /** Success. */ - 200: { - content: { - "application/json": components["schemas"]["BackfillCollection"]; - }; - }; - 401: components["responses"]["Unauthenticated"]; - 403: components["responses"]["PermissionDenied"]; - }; - }; - create_backfill: { - responses: { - /** Success. */ - 200: { - content: { - "application/json": components["schemas"]["Backfill"]; - }; - }; - 400: components["responses"]["BadRequest"]; - 401: components["responses"]["Unauthenticated"]; - 403: components["responses"]["PermissionDenied"]; - }; - requestBody: { - content: { - "application/json": components["schemas"]["Backfill"]; - }; - }; - }; - get_backfill: { - parameters: { - path: { - /** The integer id identifying the backfill entity. */ - backfill_id: components["parameters"]["BackfillIdPath"]; - }; - }; - responses: { - /** Success. */ - 200: { - content: { - "application/json": components["schemas"]["Backfill"]; - }; - }; - 401: components["responses"]["Unauthenticated"]; - 403: components["responses"]["PermissionDenied"]; - 404: components["responses"]["NotFound"]; - }; - }; - pause_backfill: { - parameters: { - path: { - /** The integer id identifying the backfill entity. */ - backfill_id: components["parameters"]["BackfillIdPath"]; - }; - }; - responses: { - /** Success. */ - 200: { - content: { - "application/json": components["schemas"]["Backfill"]; - }; - }; - 401: components["responses"]["Unauthenticated"]; - 403: components["responses"]["PermissionDenied"]; - 404: components["responses"]["NotFound"]; - 409: components["responses"]["Conflict"]; - }; - }; - unpause_backfill: { - parameters: { - path: { - /** The integer id identifying the backfill entity. */ - backfill_id: components["parameters"]["BackfillIdPath"]; - }; - }; - responses: { - /** Success. 
*/ - 200: { - content: { - "application/json": components["schemas"]["Backfill"]; - }; - }; - 401: components["responses"]["Unauthenticated"]; - 403: components["responses"]["PermissionDenied"]; - 404: components["responses"]["NotFound"]; - 409: components["responses"]["Conflict"]; - }; - }; - /** - * When a backfill is cancelled, all queued dag runs will be marked as failed. - * Running dag runs will be allowed to continue. - */ - cancel_backfill: { - parameters: { - path: { - /** The integer id identifying the backfill entity. */ - backfill_id: components["parameters"]["BackfillIdPath"]; - }; - }; - responses: { - /** Success. */ - 200: { - content: { - "application/json": components["schemas"]["Backfill"]; - }; - }; - 401: components["responses"]["Unauthenticated"]; - 403: components["responses"]["PermissionDenied"]; - 404: components["responses"]["NotFound"]; - 409: components["responses"]["Conflict"]; - }; - }; get_connections: { parameters: { query: { @@ -5642,24 +5479,6 @@ export type HealthStatus = CamelCasedPropertiesDeep< export type Operations = operations; /* Types for operation variables */ -export type ListBackfillsVariables = CamelCasedPropertiesDeep< - operations["list_backfills"]["parameters"]["query"] ->; -export type CreateBackfillVariables = CamelCasedPropertiesDeep< - operations["create_backfill"]["requestBody"]["content"]["application/json"] ->; -export type GetBackfillVariables = CamelCasedPropertiesDeep< - operations["get_backfill"]["parameters"]["path"] ->; -export type PauseBackfillVariables = CamelCasedPropertiesDeep< - operations["pause_backfill"]["parameters"]["path"] ->; -export type UnpauseBackfillVariables = CamelCasedPropertiesDeep< - operations["unpause_backfill"]["parameters"]["path"] ->; -export type CancelBackfillVariables = CamelCasedPropertiesDeep< - operations["cancel_backfill"]["parameters"]["path"] ->; export type GetConnectionsVariables = CamelCasedPropertiesDeep< operations["get_connections"]["parameters"]["query"] >; diff --git a/tests/api_connexion/endpoints/test_backfill_endpoint.py b/tests/api_connexion/endpoints/test_backfill_endpoint.py deleted file mode 100644 index 14a5078c539a..000000000000 --- a/tests/api_connexion/endpoints/test_backfill_endpoint.py +++ /dev/null @@ -1,442 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-from __future__ import annotations - -import os -from datetime import datetime -from unittest import mock - -import pendulum -import pytest - -from airflow.models import DagBag, DagModel -from airflow.models.backfill import Backfill, ReprocessBehavior -from airflow.models.dag import DAG -from airflow.operators.empty import EmptyOperator -from airflow.utils import timezone -from airflow.utils.session import provide_session - -from tests_common.test_utils.api_connexion_utils import create_user, delete_user -from tests_common.test_utils.db import ( - clear_db_backfills, - clear_db_dags, - clear_db_runs, - clear_db_serialized_dags, -) - -pytestmark = [pytest.mark.db_test, pytest.mark.need_serialized_dag] - - -DAG_ID = "test_dag" -TASK_ID = "op1" -DAG2_ID = "test_dag2" -DAG3_ID = "test_dag3" -UTC_JSON_REPR = "UTC" if pendulum.__version__.startswith("3") else "Timezone('UTC')" - - -def _clean_db(): - clear_db_backfills() - clear_db_runs() - clear_db_dags() - clear_db_serialized_dags() - - -@pytest.fixture(autouse=True) -def clean_db(): - _clean_db() - yield - _clean_db() - - -@pytest.fixture(scope="module") -def configured_app(minimal_app_for_api): - app = minimal_app_for_api - - create_user( - app, - username="test", - role_name="admin", - ) - create_user(app, username="test_no_permissions", role_name=None) - - with DAG( - DAG_ID, - schedule=None, - start_date=datetime(2020, 6, 15), - doc_md="details", - params={"foo": 1}, - tags=["example"], - ) as dag: - EmptyOperator(task_id=TASK_ID) - - with DAG(DAG2_ID, schedule=None, start_date=datetime(2020, 6, 15)) as dag2: # no doc_md - EmptyOperator(task_id=TASK_ID) - - with DAG(DAG3_ID, schedule=None) as dag3: # DAG start_date set to None - EmptyOperator(task_id=TASK_ID, start_date=datetime(2019, 6, 12)) - - dag_bag = DagBag(os.devnull, include_examples=False) - dag_bag.dags = {dag.dag_id: dag, dag2.dag_id: dag2, dag3.dag_id: dag3} - - app.dag_bag = dag_bag - - yield app - - delete_user(app, username="test") - delete_user(app, username="test_no_permissions") - - -class TestBackfillEndpoint: - @pytest.fixture(autouse=True) - def setup_attrs(self, configured_app) -> None: - self.app = configured_app - self.client = self.app.test_client() # type:ignore - self.dag_id = DAG_ID - self.dag2_id = DAG2_ID - self.dag3_id = DAG3_ID - - @provide_session - def _create_dag_models(self, *, count=1, dag_id_prefix="TEST_DAG", is_paused=False, session=None): - dags = [] - for num in range(1, count + 1): - dag_model = DagModel( - dag_id=f"{dag_id_prefix}_{num}", - fileloc=f"/tmp/dag_{num}.py", - is_active=True, - timetable_summary="0 0 * * *", - is_paused=is_paused, - ) - session.add(dag_model) - dags.append(dag_model) - return dags - - @provide_session - def _create_deactivated_dag(self, session=None): - dag_model = DagModel( - dag_id="TEST_DAG_DELETED_1", - fileloc="/tmp/dag_del_1.py", - schedule_interval="2 2 * * *", - is_active=False, - ) - session.add(dag_model) - - -class TestListBackfills(TestBackfillEndpoint): - def test_should_respond_200(self, session): - (dag,) = self._create_dag_models() - from_date = timezone.utcnow() - to_date = timezone.utcnow() - b = Backfill(dag_id=dag.dag_id, from_date=from_date, to_date=to_date) - session.add(b) - session.commit() - response = self.client.get( - f"/api/v1/backfills?dag_id={dag.dag_id}", - environ_overrides={"REMOTE_USER": "test"}, - ) - assert response.status_code == 200 - assert response.json == { - "backfills": [ - { - "completed_at": mock.ANY, - "created_at": mock.ANY, - "dag_id": "TEST_DAG_1", - "dag_run_conf": 
None, - "from_date": from_date.isoformat(), - "id": b.id, - "is_paused": False, - "reprocess_behavior": "none", - "max_active_runs": 10, - "to_date": to_date.isoformat(), - "updated_at": mock.ANY, - } - ], - "total_entries": 1, - } - - @pytest.mark.parametrize( - "user, expected", - [ - ("test_no_permissions", 403), - ("test", 200), - (None, 401), - ], - ) - def test_should_respond_200_with_granular_dag_access(self, user, expected, session): - (dag,) = self._create_dag_models() - from_date = timezone.utcnow() - to_date = timezone.utcnow() - b = Backfill( - dag_id=dag.dag_id, - from_date=from_date, - to_date=to_date, - ) - - session.add(b) - session.commit() - kwargs = {} - if user: - kwargs.update(environ_overrides={"REMOTE_USER": user}) - response = self.client.get("/api/v1/backfills?dag_id=TEST_DAG_1", **kwargs) - assert response.status_code == expected - - -class TestGetBackfill(TestBackfillEndpoint): - def test_should_respond_200(self, session): - (dag,) = self._create_dag_models() - from_date = timezone.utcnow() - to_date = timezone.utcnow() - backfill = Backfill(dag_id=dag.dag_id, from_date=from_date, to_date=to_date) - session.add(backfill) - session.commit() - response = self.client.get( - f"/api/v1/backfills/{backfill.id}", - environ_overrides={"REMOTE_USER": "test"}, - ) - assert response.status_code == 200 - assert response.json == { - "completed_at": mock.ANY, - "created_at": mock.ANY, - "dag_id": "TEST_DAG_1", - "dag_run_conf": None, - "from_date": from_date.isoformat(), - "id": backfill.id, - "is_paused": False, - "reprocess_behavior": "none", - "max_active_runs": 10, - "to_date": to_date.isoformat(), - "updated_at": mock.ANY, - } - - def test_no_exist(self, session): - response = self.client.get( - f"/api/v1/backfills/{23198409834208}", - environ_overrides={"REMOTE_USER": "test"}, - ) - assert response.status_code == 404 - assert response.json.get("title") == "Backfill not found" - - @pytest.mark.parametrize( - "user, expected", - [ - ("test_no_permissions", 403), - ("test", 200), - (None, 401), - ], - ) - def test_should_respond_200_with_granular_dag_access(self, user, expected, session): - (dag,) = self._create_dag_models() - from_date = timezone.utcnow() - to_date = timezone.utcnow() - backfill = Backfill( - dag_id=dag.dag_id, - from_date=from_date, - to_date=to_date, - ) - session.add(backfill) - session.commit() - kwargs = {} - if user: - kwargs.update(environ_overrides={"REMOTE_USER": user}) - response = self.client.get(f"/api/v1/backfills/{backfill.id}", **kwargs) - assert response.status_code == expected - - -class TestCreateBackfill(TestBackfillEndpoint): - @pytest.mark.parametrize( - "user, expected", - [ - ("test_no_permissions", 403), - ("test", 200), - (None, 401), - ], - ) - @pytest.mark.parametrize( - "repro_act, repro_exp", - [ - (None, ReprocessBehavior.NONE), - ("none", ReprocessBehavior.NONE), - ("failed", ReprocessBehavior.FAILED), - ("completed", ReprocessBehavior.COMPLETED), - ], - ) - def test_create_backfill(self, repro_act, repro_exp, user, expected, session, dag_maker): - if repro_act is not None and expected > 300: - pytest.skip("this combination not needed") - with dag_maker(session=session, dag_id="TEST_DAG_1", schedule="0 * * * *") as dag: - EmptyOperator(task_id="mytask") - session.query(DagModel).all() - from_date = pendulum.parse("2024-01-01") - from_date_iso = from_date.isoformat() - to_date = pendulum.parse("2024-02-01") - to_date_iso = to_date.isoformat() - max_active_runs = 5 - data = { - "dag_id": dag.dag_id, - "from_date": 
f"{from_date_iso}", - "to_date": f"{to_date_iso}", - "max_active_runs": max_active_runs, - "reverse": False, - "dag_run_conf": {"param1": "val1", "param2": True}, - } - if repro_act is not None: - data["reprocess_behavior"] = repro_act - kwargs = {} - if user: - kwargs.update(environ_overrides={"REMOTE_USER": user}) - - response = self.client.post( - "/api/v1/backfills", - json=data, - **kwargs, - ) - assert response.status_code == expected - if expected < 300: - assert response.json == { - "completed_at": mock.ANY, - "created_at": mock.ANY, - "dag_id": "TEST_DAG_1", - "dag_run_conf": {"param1": "val1", "param2": True}, - "from_date": from_date_iso, - "id": mock.ANY, - "is_paused": False, - "reprocess_behavior": repro_exp, - "max_active_runs": 5, - "to_date": to_date_iso, - "updated_at": mock.ANY, - } - - -class TestPauseBackfill(TestBackfillEndpoint): - def test_should_respond_200(self, session): - (dag,) = self._create_dag_models() - from_date = timezone.utcnow() - to_date = timezone.utcnow() - backfill = Backfill(dag_id=dag.dag_id, from_date=from_date, to_date=to_date) - session.add(backfill) - session.commit() - response = self.client.post( - f"/api/v1/backfills/{backfill.id}/pause", - environ_overrides={"REMOTE_USER": "test"}, - ) - assert response.status_code == 200 - assert response.json == { - "completed_at": mock.ANY, - "created_at": mock.ANY, - "dag_id": "TEST_DAG_1", - "dag_run_conf": None, - "from_date": from_date.isoformat(), - "id": backfill.id, - "is_paused": True, - "reprocess_behavior": "none", - "max_active_runs": 10, - "to_date": to_date.isoformat(), - "updated_at": mock.ANY, - } - - @pytest.mark.parametrize( - "user, expected", - [ - ("test_no_permissions", 403), - ("test", 200), - (None, 401), - ], - ) - def test_should_respond_200_with_granular_dag_access(self, user, expected, session): - (dag,) = self._create_dag_models() - from_date = timezone.utcnow() - to_date = timezone.utcnow() - backfill = Backfill( - dag_id=dag.dag_id, - from_date=from_date, - to_date=to_date, - ) - session.add(backfill) - session.commit() - kwargs = {} - if user: - kwargs.update(environ_overrides={"REMOTE_USER": user}) - response = self.client.post(f"/api/v1/backfills/{backfill.id}/pause", **kwargs) - assert response.status_code == expected - - -class TestCancelBackfill(TestBackfillEndpoint): - def test_should_respond_200(self, session): - (dag,) = self._create_dag_models() - from_date = timezone.utcnow() - to_date = timezone.utcnow() - backfill = Backfill(dag_id=dag.dag_id, from_date=from_date, to_date=to_date) - session.add(backfill) - session.commit() - response = self.client.post( - f"/api/v1/backfills/{backfill.id}/cancel", - environ_overrides={"REMOTE_USER": "test"}, - ) - assert response.status_code == 200 - assert response.json == { - "completed_at": mock.ANY, - "created_at": mock.ANY, - "dag_id": "TEST_DAG_1", - "dag_run_conf": None, - "from_date": from_date.isoformat(), - "id": backfill.id, - "is_paused": True, - "reprocess_behavior": "none", - "max_active_runs": 10, - "to_date": to_date.isoformat(), - "updated_at": mock.ANY, - } - assert pendulum.parse(response.json["completed_at"]) - # now it is marked as completed - assert pendulum.parse(response.json["completed_at"]) - - # get conflict when canceling already-canceled backfill - response = self.client.post( - f"/api/v1/backfills/{backfill.id}/cancel", environ_overrides={"REMOTE_USER": "test"} - ) - assert response.status_code == 409 - - @pytest.mark.parametrize( - "user, expected", - [ - ("test_no_permissions", 403), - ("test", 
200), - (None, 401), - ], - ) - def test_should_respond_200_with_granular_dag_access(self, user, expected, session): - (dag,) = self._create_dag_models() - from_date = timezone.utcnow() - to_date = timezone.utcnow() - backfill = Backfill( - dag_id=dag.dag_id, - from_date=from_date, - to_date=to_date, - ) - session.add(backfill) - session.commit() - kwargs = {} - if user: - kwargs.update(environ_overrides={"REMOTE_USER": user}) - response = self.client.post(f"/api/v1/backfills/{backfill.id}/cancel", **kwargs) - assert response.status_code == expected - if response.status_code < 300: - # now it is marked as completed - assert pendulum.parse(response.json["completed_at"]) - - # get conflict when canceling already-canceled backfill - response = self.client.post(f"/api/v1/backfills/{backfill.id}/cancel", **kwargs) - assert response.status_code == 409 diff --git a/tests/api_connexion/schemas/test_backfill_schema.py b/tests/api_connexion/schemas/test_backfill_schema.py deleted file mode 100644 index b9c4e91a59f8..000000000000 --- a/tests/api_connexion/schemas/test_backfill_schema.py +++ /dev/null @@ -1,55 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -from airflow.api_connexion.schemas.backfill_schema import BackfillCollection, backfill_collection_schema -from airflow.models.backfill import Backfill -from airflow.utils import timezone - - -class TestBackfillSchema: - def test_serialize_direct(self): - now = timezone.utcnow() - now_iso = now.isoformat() - b1 = Backfill( - dag_id="hi", - created_at=now, - completed_at=now, - from_date=now, - to_date=now, - updated_at=now, - ) - bc = BackfillCollection(backfills=[b1], total_entries=1) - out = backfill_collection_schema.dump(bc) - assert out == { - "backfills": [ - { - "completed_at": now_iso, - "created_at": now_iso, - "dag_id": "hi", - "dag_run_conf": None, - "from_date": now_iso, - "id": None, - "is_paused": None, - "reprocess_behavior": None, - "max_active_runs": None, - "to_date": now_iso, - "updated_at": now_iso, - } - ], - "total_entries": 1, - } diff --git a/tests/api_fastapi/core_api/routes/public/test_backfills.py b/tests/api_fastapi/core_api/routes/public/test_backfills.py new file mode 100644 index 000000000000..1c64b10848f4 --- /dev/null +++ b/tests/api_fastapi/core_api/routes/public/test_backfills.py @@ -0,0 +1,309 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import os +from datetime import datetime +from unittest import mock + +import pendulum +import pytest +from sqlalchemy import select + +from airflow.models import DagBag, DagModel, DagRun +from airflow.models.backfill import Backfill, BackfillDagRun, ReprocessBehavior, _create_backfill +from airflow.models.dag import DAG +from airflow.operators.empty import EmptyOperator +from airflow.providers.standard.operators.python import PythonOperator +from airflow.utils import timezone +from airflow.utils.session import provide_session +from airflow.utils.state import DagRunState + +from tests_common.test_utils.db import ( + clear_db_backfills, + clear_db_dags, + clear_db_runs, + clear_db_serialized_dags, +) + +pytestmark = [pytest.mark.db_test, pytest.mark.need_serialized_dag] + + +DAG_ID = "test_dag" +TASK_ID = "op1" +DAG2_ID = "test_dag2" +DAG3_ID = "test_dag3" + + +def _clean_db(): + clear_db_backfills() + clear_db_runs() + clear_db_dags() + clear_db_serialized_dags() + + +@pytest.fixture(autouse=True) +def clean_db(): + _clean_db() + yield + _clean_db() + + +def make_dags(): + with DAG( + DAG_ID, + schedule=None, + start_date=datetime(2020, 6, 15), + doc_md="details", + params={"foo": 1}, + tags=["example"], + ) as dag: + EmptyOperator(task_id=TASK_ID) + + with DAG(DAG2_ID, schedule=None, start_date=datetime(2020, 6, 15)) as dag2: # no doc_md + EmptyOperator(task_id=TASK_ID) + + with DAG(DAG3_ID, schedule=None) as dag3: # DAG start_date set to None + EmptyOperator(task_id=TASK_ID, start_date=datetime(2019, 6, 12)) + + dag_bag = DagBag(os.devnull, include_examples=False) + dag_bag.dags = {dag.dag_id: dag, dag2.dag_id: dag2, dag3.dag_id: dag3} + + +def to_iso(val): + return pendulum.instance(val).to_iso8601_string() + + +class TestBackfillEndpoint: + @provide_session + def _create_dag_models(self, *, count=1, dag_id_prefix="TEST_DAG", is_paused=False, session=None): + dags = [] + for num in range(1, count + 1): + dag_model = DagModel( + dag_id=f"{dag_id_prefix}_{num}", + fileloc=f"/tmp/dag_{num}.py", + is_active=True, + timetable_summary="0 0 * * *", + is_paused=is_paused, + ) + session.add(dag_model) + dags.append(dag_model) + return dags + + +class TestListBackfills(TestBackfillEndpoint): + def test_list_backfill(self, test_client, session): + (dag,) = self._create_dag_models() + from_date = timezone.utcnow() + to_date = timezone.utcnow() + b = Backfill(dag_id=dag.dag_id, from_date=from_date, to_date=to_date) + session.add(b) + session.commit() + response = test_client.get(f"/public/backfills?dag_id={dag.dag_id}") + assert response.status_code == 200 + assert response.json() == { + "backfills": [ + { + "completed_at": mock.ANY, + "created_at": mock.ANY, + "dag_id": "TEST_DAG_1", + "dag_run_conf": {}, + "from_date": to_iso(from_date), + "id": b.id, + "is_paused": False, + "reprocess_behavior": "none", + "max_active_runs": 10, + "to_date": to_iso(to_date), + "updated_at": mock.ANY, + } + ], + "total_entries": 1, + } + + +class TestGetBackfill(TestBackfillEndpoint): + def test_get_backfill(self, session, test_client): + (dag,) = 
self._create_dag_models() + from_date = timezone.utcnow() + to_date = timezone.utcnow() + backfill = Backfill(dag_id=dag.dag_id, from_date=from_date, to_date=to_date) + session.add(backfill) + session.commit() + response = test_client.get(f"/public/backfills/{backfill.id}") + assert response.status_code == 200 + assert response.json() == { + "completed_at": mock.ANY, + "created_at": mock.ANY, + "dag_id": "TEST_DAG_1", + "dag_run_conf": {}, + "from_date": to_iso(from_date), + "id": backfill.id, + "is_paused": False, + "reprocess_behavior": "none", + "max_active_runs": 10, + "to_date": to_iso(to_date), + "updated_at": mock.ANY, + } + + def test_no_exist(self, session, test_client): + response = test_client.get(f"/public/backfills/{231984098}") + assert response.status_code == 404 + assert response.json().get("detail") == "Backfill not found" + + +class TestCreateBackfill(TestBackfillEndpoint): + @pytest.mark.parametrize( + "repro_act, repro_exp", + [ + (None, ReprocessBehavior.NONE), + ("none", ReprocessBehavior.NONE), + ("failed", ReprocessBehavior.FAILED), + ("completed", ReprocessBehavior.COMPLETED), + ], + ) + def test_create_backfill(self, repro_act, repro_exp, session, dag_maker, test_client): + with dag_maker(session=session, dag_id="TEST_DAG_1", schedule="0 * * * *") as dag: + EmptyOperator(task_id="mytask") + session.query(DagModel).all() + session.commit() + from_date = pendulum.parse("2024-01-01") + from_date_iso = to_iso(from_date) + to_date = pendulum.parse("2024-02-01") + to_date_iso = to_iso(to_date) + max_active_runs = 5 + data = { + "dag_id": dag.dag_id, + "from_date": f"{from_date_iso}", + "to_date": f"{to_date_iso}", + "max_active_runs": max_active_runs, + "run_backwards": False, + "dag_run_conf": {"param1": "val1", "param2": True}, + } + if repro_act is not None: + data["reprocess_behavior"] = repro_act + response = test_client.post( + url="/public/backfills", + json=data, + ) + assert response.status_code == 200 + assert response.json() == { + "completed_at": mock.ANY, + "created_at": mock.ANY, + "dag_id": "TEST_DAG_1", + "dag_run_conf": {"param1": "val1", "param2": True}, + "from_date": from_date_iso, + "id": mock.ANY, + "is_paused": False, + "reprocess_behavior": repro_exp, + "max_active_runs": 5, + "to_date": to_date_iso, + "updated_at": mock.ANY, + } + + +class TestCancelBackfill(TestBackfillEndpoint): + def test_cancel_backfill(self, session, test_client): + (dag,) = self._create_dag_models() + from_date = timezone.utcnow() + to_date = timezone.utcnow() + backfill = Backfill(dag_id=dag.dag_id, from_date=from_date, to_date=to_date) + session.add(backfill) + session.commit() + response = test_client.put( + f"/public/backfills/{backfill.id}/cancel", + ) + assert response.status_code == 200 + assert response.json() == { + "completed_at": mock.ANY, + "created_at": mock.ANY, + "dag_id": "TEST_DAG_1", + "dag_run_conf": {}, + "from_date": to_iso(from_date), + "id": backfill.id, + "is_paused": True, + "reprocess_behavior": "none", + "max_active_runs": 10, + "to_date": to_iso(to_date), + "updated_at": mock.ANY, + } + assert pendulum.parse(response.json()["completed_at"]) + # now it is marked as completed + assert pendulum.parse(response.json()["completed_at"]) + + # get conflict when canceling already-canceled backfill + response = test_client.put(f"/public/backfills/{backfill.id}/cancel") + assert response.status_code == 409 + + def test_cancel_backfill_end_states(self, dag_maker, session, test_client): + """ + Queued runs should be marked *failed*. 
+ Every other dag run should be left alone. + """ + with dag_maker(schedule="@daily") as dag: + PythonOperator(task_id="hi", python_callable=print) + b = _create_backfill( + dag_id=dag.dag_id, + from_date=timezone.datetime(2021, 1, 1), + to_date=timezone.datetime(2021, 1, 5), + max_active_runs=2, + reverse=False, + dag_run_conf={}, + ) + query = ( + select(DagRun) + .join(BackfillDagRun.dag_run) + .where(BackfillDagRun.backfill_id == b.id) + .order_by(BackfillDagRun.sort_ordinal) + ) + dag_runs = session.scalars(query).all() + dates = [str(x.logical_date.date()) for x in dag_runs] + expected_dates = ["2021-01-01", "2021-01-02", "2021-01-03", "2021-01-04", "2021-01-05"] + assert dates == expected_dates + assert all(x.state == DagRunState.QUEUED for x in dag_runs) + dag_runs[0].state = "running" + session.commit() + response = test_client.put(f"/public/backfills/{b.id}/cancel") + assert response.status_code == 200 + session.expunge_all() + dag_runs = session.scalars(query).all() + states = [x.state for x in dag_runs] + assert states == ["running", "failed", "failed", "failed", "failed"] + + +class TestPauseBackfill(TestBackfillEndpoint): + def test_pause_backfill(self, session, test_client): + (dag,) = self._create_dag_models() + from_date = timezone.utcnow() + to_date = timezone.utcnow() + backfill = Backfill(dag_id=dag.dag_id, from_date=from_date, to_date=to_date) + session.add(backfill) + session.commit() + response = test_client.put(f"/public/backfills/{backfill.id}/pause") + assert response.status_code == 200 + assert response.json() == { + "completed_at": mock.ANY, + "created_at": mock.ANY, + "dag_id": "TEST_DAG_1", + "dag_run_conf": {}, + "from_date": to_iso(from_date), + "id": backfill.id, + "is_paused": True, + "reprocess_behavior": "none", + "max_active_runs": 10, + "to_date": to_iso(to_date), + "updated_at": mock.ANY, + } diff --git a/tests/models/test_backfill.py b/tests/models/test_backfill.py index a67590e91a57..a32db07808e8 100644 --- a/tests/models/test_backfill.py +++ b/tests/models/test_backfill.py @@ -32,7 +32,6 @@ BackfillDagRun, BackfillDagRunExceptionReason, ReprocessBehavior, - _cancel_backfill, _create_backfill, ) from airflow.providers.standard.operators.python import PythonOperator @@ -323,41 +322,6 @@ def test_active_dag_run(dag_maker, session): ) -def test_cancel_backfill(dag_maker, session): - """ - Queued runs should be marked *failed*. - Every other dag run should be left alone. 
- """ - with dag_maker(schedule="@daily") as dag: - PythonOperator(task_id="hi", python_callable=print) - b = _create_backfill( - dag_id=dag.dag_id, - from_date=timezone.datetime(2021, 1, 1), - to_date=timezone.datetime(2021, 1, 5), - max_active_runs=2, - reverse=False, - dag_run_conf={}, - ) - query = ( - select(DagRun) - .join(BackfillDagRun.dag_run) - .where(BackfillDagRun.backfill_id == b.id) - .order_by(BackfillDagRun.sort_ordinal) - ) - dag_runs = session.scalars(query).all() - dates = [str(x.logical_date.date()) for x in dag_runs] - expected_dates = ["2021-01-01", "2021-01-02", "2021-01-03", "2021-01-04", "2021-01-05"] - assert dates == expected_dates - assert all(x.state == DagRunState.QUEUED for x in dag_runs) - dag_runs[0].state = "running" - session.commit() - _cancel_backfill(backfill_id=b.id) - session.expunge_all() - dag_runs = session.scalars(query).all() - states = [x.state for x in dag_runs] - assert states == ["running", "failed", "failed", "failed", "failed"] - - def create_next_run( *, is_backfill: bool, next_date: datetime, dag_id: str, dag_maker, reprocess=None, session: Session ): From 028c14ad54b2947272b1258474e7d2d7f08c646d Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Mon, 4 Nov 2024 18:35:58 +0100 Subject: [PATCH 019/137] Fix condition for using uv in some jobs (#43630) * Fix condition for using uv in some jobs The condition to check if pip is forced in the CI has a bug (GitHub actions bool behaviour is somewhat problematic) and uv has not been used really for some of the jobs. This is now fixed. --------- Co-authored-by: Wei Lee --- .github/workflows/build-images.yml | 4 ++-- .github/workflows/ci.yml | 8 ++++---- .github/workflows/finalize-tests.yml | 2 +- .github/workflows/k8s-tests.yml | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 6bb8a9d21930..6b1160d6f17d 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -204,7 +204,7 @@ jobs: pull-request-target: "true" is-committer-build: ${{ needs.build-info.outputs.is-committer-build }} push-image: "true" - use-uv: ${{ needs.build-info.outputs.force-pip && 'false' || 'true' }} + use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} image-tag: ${{ needs.build-info.outputs.image-tag }} platform: "linux/amd64" python-versions: ${{ needs.build-info.outputs.python-versions }} @@ -249,7 +249,7 @@ jobs: pull-request-target: "true" is-committer-build: ${{ needs.build-info.outputs.is-committer-build }} push-image: "true" - use-uv: ${{ needs.build-info.outputs.force-pip && 'false' || 'true' }} + use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} image-tag: ${{ needs.build-info.outputs.image-tag }} platform: linux/amd64 python-versions: ${{ needs.build-info.outputs.python-versions }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index acf711c4a238..5a0f2ca6106f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -206,7 +206,7 @@ jobs: platform: "linux/amd64" python-versions: ${{ needs.build-info.outputs.python-versions }} branch: ${{ needs.build-info.outputs.default-branch }} - use-uv: ${{ needs.build-info.outputs.force-pip && 'false' || 'true' }} + use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} constraints-branch: ${{ 
needs.build-info.outputs.default-constraints-branch }} docker-cache: ${{ needs.build-info.outputs.docker-cache }} @@ -272,7 +272,7 @@ jobs: latest-versions-only: ${{ needs.build-info.outputs.latest-versions-only }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} - use-uv: ${{ needs.build-info.outputs.force-pip && 'false' || 'true' }} + use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} generate-constraints: @@ -559,7 +559,7 @@ jobs: default-python-version: ${{ needs.build-info.outputs.default-python-version }} branch: ${{ needs.build-info.outputs.default-branch }} push-image: "true" - use-uv: ${{ needs.build-info.outputs.force-pip && 'false' || 'true' }} + use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} build-provider-packages: ${{ needs.build-info.outputs.default-branch == 'main' }} upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }} @@ -642,7 +642,7 @@ jobs: kubernetes-versions-list-as-string: ${{ needs.build-info.outputs.kubernetes-versions-list-as-string }} kubernetes-combos-list-as-string: ${{ needs.build-info.outputs.kubernetes-combos-list-as-string }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} - use-uv: ${{ needs.build-info.outputs.force-pip && 'false' || 'true' }} + use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} if: > ( needs.build-info.outputs.run-kubernetes-tests == 'true' || diff --git a/.github/workflows/finalize-tests.yml b/.github/workflows/finalize-tests.yml index c948984ee10c..6f9bc74168b4 100644 --- a/.github/workflows/finalize-tests.yml +++ b/.github/workflows/finalize-tests.yml @@ -149,7 +149,7 @@ jobs: python-versions: ${{ inputs.python-versions }} branch: ${{ inputs.branch }} constraints-branch: ${{ inputs.constraints-branch }} - use-uv: ${{ needs.build-info.outputs.force-pip && 'false' || 'true' }} + use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} include-success-outputs: ${{ inputs.include-success-outputs }} docker-cache: ${{ inputs.docker-cache }} disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} diff --git a/.github/workflows/k8s-tests.yml b/.github/workflows/k8s-tests.yml index 9a764e88c4e9..3b3e067038db 100644 --- a/.github/workflows/k8s-tests.yml +++ b/.github/workflows/k8s-tests.yml @@ -101,7 +101,7 @@ jobs: k8s-env-${{ steps.breeze.outputs.host-python-version }}-\ ${{ hashFiles('scripts/ci/kubernetes/k8s_requirements.txt','hatch_build.py') }}" - name: "Switch breeze to use uv" - run: breeze setup-config --use-uv + run: breeze setup config --use-uv if: inputs.use-uv == 'true' - name: Run complete K8S tests ${{ inputs.kubernetes-combos-list-as-string }} run: breeze k8s run-complete-tests --run-in-parallel --upgrade --no-copy-local-sources From b67885734f408dda9fad61e7cc58997c09cf8fea Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Mon, 4 Nov 2024 18:37:14 +0100 Subject: [PATCH 020/137] Full-on switching Breeze development to use uv (#43628) * Full-on switching Breeze development to use uv Since we are switching to uv, breeze dev environment can now also be managed with uv and if you fully switch to it, the workflow of managing breeze, setting the dev environment and upgrading dependencies is far 
simpler than Airflow's, so we should be able to follow the regular uv workflow for it - i.e. use uv to manage, sync and upgrade dependencies. This will be a really nice way for some of the maintainers to get to learn uv more and prepare for more standardisation in future Airflow development (maybe) - when we link uv.lock and constraints management. --- dev/breeze/README.md | 70 ++ dev/breeze/uv.lock | 1902 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 1972 insertions(+) create mode 100644 dev/breeze/uv.lock diff --git a/dev/breeze/README.md b/dev/breeze/README.md index efc502668953..9a3f06066eb2 100644 --- a/dev/breeze/README.md +++ b/dev/breeze/README.md @@ -22,6 +22,7 @@ **Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - [Apache Airflow Breeze](#apache-airflow-breeze) +- [Setting up development env for Breeze](#setting-up-development-env-for-breeze) @@ -62,6 +63,75 @@ available when the package was installed. Since this file becomes part of the in to detect automatically if any of the files have changed. If they did, the user will be warned to upgrade their installations. +Setting up development env for Breeze +------------------------------------- + +> [!NOTE] +> This section is for developers of Breeze. If you are a user of Breeze, you do not need to read this section. + +Breeze is actively developed by Airflow maintainers and contributors. Airflow is an active project +and we are in the process of developing Airflow 3, so Breeze requires a lot of adjustments to keep +the dev environment in sync with Airflow 3 development - this is also why it is part of the same +repository as Airflow - it needs to be closely synchronized with Airflow development. + +As of November 2024, Airflow is switching to using `uv` as the main development environment for Airflow +and for Breeze, so the instructions below are for setting up the development environment for Breeze +using `uv`. However, we are using only standard Python packaging tools, so you can still use `pip` or +`pipenv` or other build frontends to install Breeze, but we recommend using `uv` as it is the most +convenient way to install and manage Python packages and virtual environments. + +Unlike in Airflow, where we manage our own constraints, we use `uv` to manage the requirements for Breeze +and to lock the dependencies. This way we can ensure that the dependencies are always +up-to-date and that the development environment is consistent for different people. This is +why Breeze's `uv.lock` is committed to the repository and is used by Breeze to install the dependencies +by default. Here's how to install Breeze with `uv`: + + +1. Install `uv` - see the [uv documentation](https://docs.astral.sh/uv/getting-started/installation/). + +> [!IMPORTANT] +> All the commands below should be executed while you are in the `dev/breeze` directory of the Airflow repository. + +2. Create a new virtual environment for Breeze development: + +```shell +uv venv +``` + +3. Synchronize Breeze dependencies with `uv` to the dependencies stored in the `uv.lock` file: + +```shell +uv sync +``` + +After syncing, the `.venv` directory will contain the virtual environment with all the dependencies +installed - you can use that environment to develop Breeze, for example with your favourite IDE +or text editor. You can also use `uv run` to run the scripts in the virtual environment.
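+
+If you prefer not to prefix every command with `uv run`, you can activate the environment directly
+instead - a minimal sketch, assuming a POSIX shell (on Windows the activation script differs) and the
+`breeze` console script declared by this package:
+
+```shell
+# activate the virtual environment created by `uv venv` / `uv sync`
+source .venv/bin/activate
+
+# the breeze entry point and the dev tools from the lock file (pytest, pre-commit, ...) are now on PATH
+breeze --help
+```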
+ +For example to run all tests in the virtual environment you can use: + +```shell +uv run pytest +``` + +4. Add/remove dependencies with `uv`: + +```shell +uv add +uv remove +``` + +5. Update and lock the dependencies (after adding them or periodically to keep them up-to-date): + +```shell +uv lock +``` + +Note that when you update dependencies/lock them you should commit the changes in `pyproject.toml` and `uv.lock`. + +See [uv documentation](https://docs.astral.sh/uv/getting-started/) for more details on using `uv`. + + PLEASE DO NOT MODIFY THE HASH BELOW! IT IS AUTOMATICALLY UPDATED BY PRE-COMMIT. --------------------------------------------------------------------------------------------------------- diff --git a/dev/breeze/uv.lock b/dev/breeze/uv.lock new file mode 100644 index 000000000000..bcfc8c4ce785 --- /dev/null +++ b/dev/breeze/uv.lock @@ -0,0 +1,1902 @@ +version = 1 +requires-python = ">=3.9, <4" +resolution-markers = [ + "python_full_version < '3.13'", + "python_full_version >= '3.13'", +] + +[[package]] +name = "anyio" +version = "4.6.2.post1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/09/45b9b7a6d4e45c6bcb5bf61d19e3ab87df68e0601fa8c5293de3542546cc/anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c", size = 173422 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/f5/f2b75d2fc6f1a260f340f0e7c6a060f4dd2961cc16884ed851b0d18da06a/anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d", size = 90377 }, +] + +[[package]] +name = "apache-airflow-breeze" +version = "0.0.1" +source = { editable = "." 
} +dependencies = [ + { name = "black" }, + { name = "click" }, + { name = "filelock" }, + { name = "flit" }, + { name = "gitpython" }, + { name = "google-api-python-client" }, + { name = "google-auth-httplib2" }, + { name = "google-auth-oauthlib" }, + { name = "hatch" }, + { name = "inputimeout" }, + { name = "jinja2" }, + { name = "jsonschema" }, + { name = "packaging" }, + { name = "pipx" }, + { name = "pre-commit" }, + { name = "pre-commit-uv" }, + { name = "psutil" }, + { name = "pygithub" }, + { name = "pytest" }, + { name = "pytest-xdist" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "rich" }, + { name = "rich-click" }, + { name = "semver" }, + { name = "tabulate" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "twine" }, +] + +[package.metadata] +requires-dist = [ + { name = "black", specifier = ">=23.11.0" }, + { name = "click", specifier = ">=8.1.7" }, + { name = "filelock", specifier = ">=3.13.0" }, + { name = "flit", specifier = ">=3.5.0" }, + { name = "gitpython", specifier = ">=3.1.40" }, + { name = "google-api-python-client", specifier = ">=2.142.0" }, + { name = "google-auth-httplib2", specifier = ">=0.2.0" }, + { name = "google-auth-oauthlib", specifier = ">=1.2.0" }, + { name = "hatch", specifier = "==1.9.4" }, + { name = "importlib-resources", marker = "python_full_version < '3.9'", specifier = ">=5.2,!=6.2.0,!=6.3.0,!=6.3.1" }, + { name = "inputimeout", specifier = ">=1.0.4" }, + { name = "jinja2", specifier = ">=3.1.0" }, + { name = "jsonschema", specifier = ">=4.19.1" }, + { name = "packaging", specifier = ">=23.2" }, + { name = "pipx", specifier = ">=1.4.1" }, + { name = "pre-commit", specifier = ">=3.5.0" }, + { name = "pre-commit-uv", specifier = ">=4.1.3" }, + { name = "psutil", specifier = ">=5.9.6" }, + { name = "pygithub", specifier = ">=2.1.1" }, + { name = "pytest", specifier = ">=8.2,<9" }, + { name = "pytest-xdist", specifier = ">=3.3.1" }, + { name = "pyyaml", specifier = ">=6.0.1" }, + { name = "requests", specifier = ">=2.31.0" }, + { name = "rich", specifier = ">=13.6.0" }, + { name = "rich-click", specifier = ">=1.7.1" }, + { name = "semver", specifier = ">=3.0.2" }, + { name = "tabulate", specifier = ">=0.9.0" }, + { name = "tomli", marker = "python_full_version < '3.11'", specifier = ">=2.0.1" }, + { name = "twine", specifier = ">=4.0.2" }, +] + +[[package]] +name = "argcomplete" +version = "3.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5f/39/27605e133e7f4bb0c8e48c9a6b87101515e3446003e0442761f6a02ac35e/argcomplete-3.5.1.tar.gz", hash = "sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4", size = 82280 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/be/a606a6701d491cfae75583c80a6583f8abe9c36c0b9666e867e7cdd62fe8/argcomplete-3.5.1-py3-none-any.whl", hash = "sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363", size = 43498 }, +] + +[[package]] +name = "attrs" +version = "24.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/0f/aafca9af9315aee06a89ffde799a10a582fe8de76c563ee80bbcdc08b3fb/attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346", size = 792678 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/21/5b6702a7f963e95456c0de2d495f67bf5fd62840ac655dc451586d23d39a/attrs-24.2.0-py3-none-any.whl", hash = 
"sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2", size = 63001 }, +] + +[[package]] +name = "backports-tarfile" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/86/72/cd9b395f25e290e633655a100af28cb253e4393396264a98bd5f5951d50f/backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991", size = 86406 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/fa/123043af240e49752f1c4bd24da5053b6bd00cad78c2be53c0d1e8b975bc/backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34", size = 30181 }, +] + +[[package]] +name = "black" +version = "24.10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "mypy-extensions" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = "platformdirs" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d8/0d/cc2fb42b8c50d80143221515dd7e4766995bd07c56c9a3ed30baf080b6dc/black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875", size = 645813 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/f3/465c0eb5cddf7dbbfe1fecd9b875d1dcf51b88923cd2c1d7e9ab95c6336b/black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812", size = 1623211 }, + { url = "https://files.pythonhosted.org/packages/df/57/b6d2da7d200773fdfcc224ffb87052cf283cec4d7102fab450b4a05996d8/black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea", size = 1457139 }, + { url = "https://files.pythonhosted.org/packages/6e/c5/9023b7673904a5188f9be81f5e129fff69f51f5515655fbd1d5a4e80a47b/black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f", size = 1753774 }, + { url = "https://files.pythonhosted.org/packages/e1/32/df7f18bd0e724e0d9748829765455d6643ec847b3f87e77456fc99d0edab/black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e", size = 1414209 }, + { url = "https://files.pythonhosted.org/packages/c2/cc/7496bb63a9b06a954d3d0ac9fe7a73f3bf1cd92d7a58877c27f4ad1e9d41/black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad", size = 1607468 }, + { url = "https://files.pythonhosted.org/packages/2b/e3/69a738fb5ba18b5422f50b4f143544c664d7da40f09c13969b2fd52900e0/black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50", size = 1437270 }, + { url = "https://files.pythonhosted.org/packages/c9/9b/2db8045b45844665c720dcfe292fdaf2e49825810c0103e1191515fc101a/black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392", size = 1737061 }, + { url = "https://files.pythonhosted.org/packages/a3/95/17d4a09a5be5f8c65aa4a361444d95edc45def0de887810f508d3f65db7a/black-24.10.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175", size = 1423293 }, + { url = "https://files.pythonhosted.org/packages/90/04/bf74c71f592bcd761610bbf67e23e6a3cff824780761f536512437f1e655/black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3", size = 1644256 }, + { url = "https://files.pythonhosted.org/packages/4c/ea/a77bab4cf1887f4b2e0bce5516ea0b3ff7d04ba96af21d65024629afedb6/black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65", size = 1448534 }, + { url = "https://files.pythonhosted.org/packages/4e/3e/443ef8bc1fbda78e61f79157f303893f3fddf19ca3c8989b163eb3469a12/black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f", size = 1761892 }, + { url = "https://files.pythonhosted.org/packages/52/93/eac95ff229049a6901bc84fec6908a5124b8a0b7c26ea766b3b8a5debd22/black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8", size = 1434796 }, + { url = "https://files.pythonhosted.org/packages/d0/a0/a993f58d4ecfba035e61fca4e9f64a2ecae838fc9f33ab798c62173ed75c/black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981", size = 1643986 }, + { url = "https://files.pythonhosted.org/packages/37/d5/602d0ef5dfcace3fb4f79c436762f130abd9ee8d950fa2abdbf8bbc555e0/black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b", size = 1448085 }, + { url = "https://files.pythonhosted.org/packages/47/6d/a3a239e938960df1a662b93d6230d4f3e9b4a22982d060fc38c42f45a56b/black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2", size = 1760928 }, + { url = "https://files.pythonhosted.org/packages/dd/cf/af018e13b0eddfb434df4d9cd1b2b7892bab119f7a20123e93f6910982e8/black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b", size = 1436875 }, + { url = "https://files.pythonhosted.org/packages/fe/02/f408c804e0ee78c367dcea0a01aedde4f1712af93b8b6e60df981e0228c7/black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd", size = 1622516 }, + { url = "https://files.pythonhosted.org/packages/f8/b9/9b706ed2f55bfb28b436225a9c57da35990c9005b90b8c91f03924454ad7/black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f", size = 1456181 }, + { url = "https://files.pythonhosted.org/packages/0a/1c/314d7f17434a5375682ad097f6f4cc0e3f414f3c95a9b1bb4df14a0f11f9/black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800", size = 1752801 }, + { url = "https://files.pythonhosted.org/packages/39/a7/20e5cd9237d28ad0b31438de5d9f01c8b99814576f4c0cda1edd62caf4b0/black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7", size = 1413626 }, + { url = 
"https://files.pythonhosted.org/packages/8d/a7/4b27c50537ebca8bec139b872861f9d2bf501c5ec51fcf897cb924d9e264/black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d", size = 206898 }, +] + +[[package]] +name = "cachetools" +version = "5.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/38/a0f315319737ecf45b4319a8cd1f3a908e29d9277b46942263292115eee7/cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a", size = 27661 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/07/14f8ad37f2d12a5ce41206c21820d8cb6561b728e51fad4530dff0552a67/cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292", size = 9524 }, +] + +[[package]] +name = "certifi" +version = "2024.8.30" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/ee/9b19140fe824b367c04c5e1b369942dd754c4c5462d5674002f75c4dedc1/certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9", size = 168507 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321 }, +] + +[[package]] +name = "cffi" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191 }, + { url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592 }, + { url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024 }, + { url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188 }, + { url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571 }, + { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687 }, + { url = "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211 }, + { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325 }, + { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784 }, + { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564 }, + { url = "https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", size = 171804 }, + { url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", size = 181299 }, + { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264 }, + { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651 }, + { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259 }, + { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200 }, + { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235 }, + { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721 }, + { url = 
"https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242 }, + { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999 }, + { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242 }, + { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604 }, + { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727 }, + { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400 }, + { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 }, + { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 }, + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 }, + { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 }, + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 }, + { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 }, + { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 }, + { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 }, + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 }, + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 }, + { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 }, + { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 }, + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 }, + { url = 
"https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 }, + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 }, + { url = "https://files.pythonhosted.org/packages/b9/ea/8bb50596b8ffbc49ddd7a1ad305035daa770202a6b782fc164647c2673ad/cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16", size = 182220 }, + { url = "https://files.pythonhosted.org/packages/ae/11/e77c8cd24f58285a82c23af484cf5b124a376b32644e445960d1a4654c3a/cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36", size = 178605 }, + { url = "https://files.pythonhosted.org/packages/ed/65/25a8dc32c53bf5b7b6c2686b42ae2ad58743f7ff644844af7cdb29b49361/cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8", size = 424910 }, + { url = "https://files.pythonhosted.org/packages/42/7a/9d086fab7c66bd7c4d0f27c57a1b6b068ced810afc498cc8c49e0088661c/cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576", size = 447200 }, + { url = "https://files.pythonhosted.org/packages/da/63/1785ced118ce92a993b0ec9e0d0ac8dc3e5dbfbcaa81135be56c69cabbb6/cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87", size = 454565 }, + { url = "https://files.pythonhosted.org/packages/74/06/90b8a44abf3556599cdec107f7290277ae8901a58f75e6fe8f970cd72418/cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0", size = 435635 }, + { url = "https://files.pythonhosted.org/packages/bd/62/a1f468e5708a70b1d86ead5bab5520861d9c7eacce4a885ded9faa7729c3/cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3", size = 445218 }, + { url = "https://files.pythonhosted.org/packages/5b/95/b34462f3ccb09c2594aa782d90a90b045de4ff1f70148ee79c69d37a0a5a/cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595", size = 460486 }, + { url = "https://files.pythonhosted.org/packages/fc/fc/a1e4bebd8d680febd29cf6c8a40067182b64f00c7d105f8f26b5bc54317b/cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a", size = 437911 }, + { url = "https://files.pythonhosted.org/packages/e6/c3/21cab7a6154b6a5ea330ae80de386e7665254835b9e98ecc1340b3a7de9a/cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e", 
size = 460632 }, + { url = "https://files.pythonhosted.org/packages/cb/b5/fd9f8b5a84010ca169ee49f4e4ad6f8c05f4e3545b72ee041dbbcb159882/cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7", size = 171820 }, + { url = "https://files.pythonhosted.org/packages/8c/52/b08750ce0bce45c143e1b5d7357ee8c55341b52bdef4b0f081af1eb248c2/cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662", size = 181290 }, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/8b/825cc84cf13a28bfbcba7c416ec22bf85a9584971be15b21dd8300c65b7f/charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6", size = 196363 }, + { url = "https://files.pythonhosted.org/packages/23/81/d7eef6a99e42c77f444fdd7bc894b0ceca6c3a95c51239e74a722039521c/charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b", size = 125639 }, + { url = "https://files.pythonhosted.org/packages/21/67/b4564d81f48042f520c948abac7079356e94b30cb8ffb22e747532cf469d/charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99", size = 120451 }, + { url = "https://files.pythonhosted.org/packages/c2/72/12a7f0943dd71fb5b4e7b55c41327ac0a1663046a868ee4d0d8e9c369b85/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca", size = 140041 }, + { url = "https://files.pythonhosted.org/packages/67/56/fa28c2c3e31217c4c52158537a2cf5d98a6c1e89d31faf476c89391cd16b/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d", size = 150333 }, + { url = "https://files.pythonhosted.org/packages/f9/d2/466a9be1f32d89eb1554cf84073a5ed9262047acee1ab39cbaefc19635d2/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7", size = 142921 }, + { url = "https://files.pythonhosted.org/packages/f8/01/344ec40cf5d85c1da3c1f57566c59e0c9b56bcc5566c08804a95a6cc8257/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3", 
size = 144785 }, + { url = "https://files.pythonhosted.org/packages/73/8b/2102692cb6d7e9f03b9a33a710e0164cadfce312872e3efc7cfe22ed26b4/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907", size = 146631 }, + { url = "https://files.pythonhosted.org/packages/d8/96/cc2c1b5d994119ce9f088a9a0c3ebd489d360a2eb058e2c8049f27092847/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b", size = 140867 }, + { url = "https://files.pythonhosted.org/packages/c9/27/cde291783715b8ec30a61c810d0120411844bc4c23b50189b81188b273db/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912", size = 149273 }, + { url = "https://files.pythonhosted.org/packages/3a/a4/8633b0fc1a2d1834d5393dafecce4a1cc56727bfd82b4dc18fc92f0d3cc3/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95", size = 152437 }, + { url = "https://files.pythonhosted.org/packages/64/ea/69af161062166b5975ccbb0961fd2384853190c70786f288684490913bf5/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e", size = 150087 }, + { url = "https://files.pythonhosted.org/packages/3b/fd/e60a9d9fd967f4ad5a92810138192f825d77b4fa2a557990fd575a47695b/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe", size = 145142 }, + { url = "https://files.pythonhosted.org/packages/6d/02/8cb0988a1e49ac9ce2eed1e07b77ff118f2923e9ebd0ede41ba85f2dcb04/charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc", size = 94701 }, + { url = "https://files.pythonhosted.org/packages/d6/20/f1d4670a8a723c46be695dff449d86d6092916f9e99c53051954ee33a1bc/charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749", size = 102191 }, + { url = "https://files.pythonhosted.org/packages/9c/61/73589dcc7a719582bf56aae309b6103d2762b526bffe189d635a7fcfd998/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c", size = 193339 }, + { url = "https://files.pythonhosted.org/packages/77/d5/8c982d58144de49f59571f940e329ad6e8615e1e82ef84584c5eeb5e1d72/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944", size = 124366 }, + { url = "https://files.pythonhosted.org/packages/bf/19/411a64f01ee971bed3231111b69eb56f9331a769072de479eae7de52296d/charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee", size = 118874 }, + { url = "https://files.pythonhosted.org/packages/4c/92/97509850f0d00e9f14a46bc751daabd0ad7765cff29cdfb66c68b6dad57f/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c", size = 138243 }, + { url = 
"https://files.pythonhosted.org/packages/e2/29/d227805bff72ed6d6cb1ce08eec707f7cfbd9868044893617eb331f16295/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6", size = 148676 }, + { url = "https://files.pythonhosted.org/packages/13/bc/87c2c9f2c144bedfa62f894c3007cd4530ba4b5351acb10dc786428a50f0/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea", size = 141289 }, + { url = "https://files.pythonhosted.org/packages/eb/5b/6f10bad0f6461fa272bfbbdf5d0023b5fb9bc6217c92bf068fa5a99820f5/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc", size = 142585 }, + { url = "https://files.pythonhosted.org/packages/3b/a0/a68980ab8a1f45a36d9745d35049c1af57d27255eff8c907e3add84cf68f/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5", size = 144408 }, + { url = "https://files.pythonhosted.org/packages/d7/a1/493919799446464ed0299c8eef3c3fad0daf1c3cd48bff9263c731b0d9e2/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594", size = 139076 }, + { url = "https://files.pythonhosted.org/packages/fb/9d/9c13753a5a6e0db4a0a6edb1cef7aee39859177b64e1a1e748a6e3ba62c2/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c", size = 146874 }, + { url = "https://files.pythonhosted.org/packages/75/d2/0ab54463d3410709c09266dfb416d032a08f97fd7d60e94b8c6ef54ae14b/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365", size = 150871 }, + { url = "https://files.pythonhosted.org/packages/8d/c9/27e41d481557be53d51e60750b85aa40eaf52b841946b3cdeff363105737/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129", size = 148546 }, + { url = "https://files.pythonhosted.org/packages/ee/44/4f62042ca8cdc0cabf87c0fc00ae27cd8b53ab68be3605ba6d071f742ad3/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236", size = 143048 }, + { url = "https://files.pythonhosted.org/packages/01/f8/38842422988b795220eb8038745d27a675ce066e2ada79516c118f291f07/charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99", size = 94389 }, + { url = "https://files.pythonhosted.org/packages/0b/6e/b13bd47fa9023b3699e94abf565b5a2f0b0be6e9ddac9812182596ee62e4/charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27", size = 101752 }, + { url = "https://files.pythonhosted.org/packages/d3/0b/4b7a70987abf9b8196845806198975b6aab4ce016632f817ad758a5aa056/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6", size = 194445 }, + { url = 
"https://files.pythonhosted.org/packages/50/89/354cc56cf4dd2449715bc9a0f54f3aef3dc700d2d62d1fa5bbea53b13426/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf", size = 125275 }, + { url = "https://files.pythonhosted.org/packages/fa/44/b730e2a2580110ced837ac083d8ad222343c96bb6b66e9e4e706e4d0b6df/charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db", size = 119020 }, + { url = "https://files.pythonhosted.org/packages/9d/e4/9263b8240ed9472a2ae7ddc3e516e71ef46617fe40eaa51221ccd4ad9a27/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1", size = 139128 }, + { url = "https://files.pythonhosted.org/packages/6b/e3/9f73e779315a54334240353eaea75854a9a690f3f580e4bd85d977cb2204/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03", size = 149277 }, + { url = "https://files.pythonhosted.org/packages/1a/cf/f1f50c2f295312edb8a548d3fa56a5c923b146cd3f24114d5adb7e7be558/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284", size = 142174 }, + { url = "https://files.pythonhosted.org/packages/16/92/92a76dc2ff3a12e69ba94e7e05168d37d0345fa08c87e1fe24d0c2a42223/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15", size = 143838 }, + { url = "https://files.pythonhosted.org/packages/a4/01/2117ff2b1dfc61695daf2babe4a874bca328489afa85952440b59819e9d7/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8", size = 146149 }, + { url = "https://files.pythonhosted.org/packages/f6/9b/93a332b8d25b347f6839ca0a61b7f0287b0930216994e8bf67a75d050255/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2", size = 140043 }, + { url = "https://files.pythonhosted.org/packages/ab/f6/7ac4a01adcdecbc7a7587767c776d53d369b8b971382b91211489535acf0/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719", size = 148229 }, + { url = "https://files.pythonhosted.org/packages/9d/be/5708ad18161dee7dc6a0f7e6cf3a88ea6279c3e8484844c0590e50e803ef/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631", size = 151556 }, + { url = "https://files.pythonhosted.org/packages/5a/bb/3d8bc22bacb9eb89785e83e6723f9888265f3a0de3b9ce724d66bd49884e/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b", size = 149772 }, + { url = "https://files.pythonhosted.org/packages/f7/fa/d3fc622de05a86f30beea5fc4e9ac46aead4731e73fd9055496732bcc0a4/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565", size = 144800 }, + { url = 
"https://files.pythonhosted.org/packages/9a/65/bdb9bc496d7d190d725e96816e20e2ae3a6fa42a5cac99c3c3d6ff884118/charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7", size = 94836 }, + { url = "https://files.pythonhosted.org/packages/3e/67/7b72b69d25b89c0b3cea583ee372c43aa24df15f0e0f8d3982c57804984b/charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9", size = 102187 }, + { url = "https://files.pythonhosted.org/packages/f3/89/68a4c86f1a0002810a27f12e9a7b22feb198c59b2f05231349fbce5c06f4/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114", size = 194617 }, + { url = "https://files.pythonhosted.org/packages/4f/cd/8947fe425e2ab0aa57aceb7807af13a0e4162cd21eee42ef5b053447edf5/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed", size = 125310 }, + { url = "https://files.pythonhosted.org/packages/5b/f0/b5263e8668a4ee9becc2b451ed909e9c27058337fda5b8c49588183c267a/charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250", size = 119126 }, + { url = "https://files.pythonhosted.org/packages/ff/6e/e445afe4f7fda27a533f3234b627b3e515a1b9429bc981c9a5e2aa5d97b6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920", size = 139342 }, + { url = "https://files.pythonhosted.org/packages/a1/b2/4af9993b532d93270538ad4926c8e37dc29f2111c36f9c629840c57cd9b3/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64", size = 149383 }, + { url = "https://files.pythonhosted.org/packages/fb/6f/4e78c3b97686b871db9be6f31d64e9264e889f8c9d7ab33c771f847f79b7/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23", size = 142214 }, + { url = "https://files.pythonhosted.org/packages/2b/c9/1c8fe3ce05d30c87eff498592c89015b19fade13df42850aafae09e94f35/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc", size = 144104 }, + { url = "https://files.pythonhosted.org/packages/ee/68/efad5dcb306bf37db7db338338e7bb8ebd8cf38ee5bbd5ceaaaa46f257e6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d", size = 146255 }, + { url = "https://files.pythonhosted.org/packages/0c/75/1ed813c3ffd200b1f3e71121c95da3f79e6d2a96120163443b3ad1057505/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88", size = 140251 }, + { url = "https://files.pythonhosted.org/packages/7d/0d/6f32255c1979653b448d3c709583557a4d24ff97ac4f3a5be156b2e6a210/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90", size = 148474 }, + { url = 
"https://files.pythonhosted.org/packages/ac/a0/c1b5298de4670d997101fef95b97ac440e8c8d8b4efa5a4d1ef44af82f0d/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b", size = 151849 }, + { url = "https://files.pythonhosted.org/packages/04/4f/b3961ba0c664989ba63e30595a3ed0875d6790ff26671e2aae2fdc28a399/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d", size = 149781 }, + { url = "https://files.pythonhosted.org/packages/d8/90/6af4cd042066a4adad58ae25648a12c09c879efa4849c705719ba1b23d8c/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482", size = 144970 }, + { url = "https://files.pythonhosted.org/packages/cc/67/e5e7e0cbfefc4ca79025238b43cdf8a2037854195b37d6417f3d0895c4c2/charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67", size = 94973 }, + { url = "https://files.pythonhosted.org/packages/65/97/fc9bbc54ee13d33dc54a7fcf17b26368b18505500fc01e228c27b5222d80/charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b", size = 102308 }, + { url = "https://files.pythonhosted.org/packages/54/2f/28659eee7f5d003e0f5a3b572765bf76d6e0fe6601ab1f1b1dd4cba7e4f1/charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa", size = 196326 }, + { url = "https://files.pythonhosted.org/packages/d1/18/92869d5c0057baa973a3ee2af71573be7b084b3c3d428fe6463ce71167f8/charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a", size = 125614 }, + { url = "https://files.pythonhosted.org/packages/d6/27/327904c5a54a7796bb9f36810ec4173d2df5d88b401d2b95ef53111d214e/charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0", size = 120450 }, + { url = "https://files.pythonhosted.org/packages/a4/23/65af317914a0308495133b2d654cf67b11bbd6ca16637c4e8a38f80a5a69/charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a", size = 140135 }, + { url = "https://files.pythonhosted.org/packages/f2/41/6190102ad521a8aa888519bb014a74251ac4586cde9b38e790901684f9ab/charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242", size = 150413 }, + { url = "https://files.pythonhosted.org/packages/7b/ab/f47b0159a69eab9bd915591106859f49670c75f9a19082505ff16f50efc0/charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b", size = 142992 }, + { url = "https://files.pythonhosted.org/packages/28/89/60f51ad71f63aaaa7e51a2a2ad37919985a341a1d267070f212cdf6c2d22/charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62", size = 144871 }, + { url = 
"https://files.pythonhosted.org/packages/0c/48/0050550275fea585a6e24460b42465020b53375017d8596c96be57bfabca/charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0", size = 146756 }, + { url = "https://files.pythonhosted.org/packages/dc/b5/47f8ee91455946f745e6c9ddbb0f8f50314d2416dd922b213e7d5551ad09/charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd", size = 141034 }, + { url = "https://files.pythonhosted.org/packages/84/79/5c731059ebab43e80bf61fa51666b9b18167974b82004f18c76378ed31a3/charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be", size = 149434 }, + { url = "https://files.pythonhosted.org/packages/ca/f3/0719cd09fc4dc42066f239cb3c48ced17fc3316afca3e2a30a4756fe49ab/charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d", size = 152443 }, + { url = "https://files.pythonhosted.org/packages/f7/0e/c6357297f1157c8e8227ff337e93fd0a90e498e3d6ab96b2782204ecae48/charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3", size = 150294 }, + { url = "https://files.pythonhosted.org/packages/54/9a/acfa96dc4ea8c928040b15822b59d0863d6e1757fba8bd7de3dc4f761c13/charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742", size = 145314 }, + { url = "https://files.pythonhosted.org/packages/73/1c/b10a63032eaebb8d7bcb8544f12f063f41f5f463778ac61da15d9985e8b6/charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2", size = 94724 }, + { url = "https://files.pythonhosted.org/packages/c5/77/3a78bf28bfaa0863f9cfef278dbeadf55efe064eafff8c7c424ae3c4c1bf/charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca", size = 102159 }, + { url = "https://files.pythonhosted.org/packages/bf/9b/08c0432272d77b04803958a4598a51e2a4b51c06640af8b8f0f908c18bf2/charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", size = 49446 }, +] + +[[package]] +name = "click" +version = "8.1.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "platform_system == 'Windows'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 336121 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", size = 97941 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "cryptography" +version = "43.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0d/05/07b55d1fa21ac18c3a8c79f764e2514e6f6a9698f1be44994f5adf0d29db/cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805", size = 686989 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/f3/01fdf26701a26f4b4dbc337a26883ad5bccaa6f1bbbdd29cd89e22f18a1c/cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e", size = 6225303 }, + { url = "https://files.pythonhosted.org/packages/a3/01/4896f3d1b392025d4fcbecf40fdea92d3df8662123f6835d0af828d148fd/cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e", size = 3760905 }, + { url = "https://files.pythonhosted.org/packages/0a/be/f9a1f673f0ed4b7f6c643164e513dbad28dd4f2dcdf5715004f172ef24b6/cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f", size = 3977271 }, + { url = "https://files.pythonhosted.org/packages/4e/49/80c3a7b5514d1b416d7350830e8c422a4d667b6d9b16a9392ebfd4a5388a/cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6", size = 3746606 }, + { url = "https://files.pythonhosted.org/packages/0e/16/a28ddf78ac6e7e3f25ebcef69ab15c2c6be5ff9743dd0709a69a4f968472/cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18", size = 3986484 }, + { url = "https://files.pythonhosted.org/packages/01/f5/69ae8da70c19864a32b0315049866c4d411cce423ec169993d0434218762/cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd", size = 3852131 }, + { url = "https://files.pythonhosted.org/packages/fd/db/e74911d95c040f9afd3612b1f732e52b3e517cb80de8bf183be0b7d413c6/cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73", size = 4075647 }, + { url = "https://files.pythonhosted.org/packages/56/48/7b6b190f1462818b324e674fa20d1d5ef3e24f2328675b9b16189cbf0b3c/cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2", size = 2623873 }, + { url = "https://files.pythonhosted.org/packages/eb/b1/0ebff61a004f7f89e7b65ca95f2f2375679d43d0290672f7713ee3162aff/cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd", size = 3068039 }, + { url = "https://files.pythonhosted.org/packages/30/d5/c8b32c047e2e81dd172138f772e81d852c51f0f2ad2ae8a24f1122e9e9a7/cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984", size = 6222984 }, + { url = 
"https://files.pythonhosted.org/packages/2f/78/55356eb9075d0be6e81b59f45c7b48df87f76a20e73893872170471f3ee8/cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5", size = 3762968 }, + { url = "https://files.pythonhosted.org/packages/2a/2c/488776a3dc843f95f86d2f957ca0fc3407d0242b50bede7fad1e339be03f/cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4", size = 3977754 }, + { url = "https://files.pythonhosted.org/packages/7c/04/2345ca92f7a22f601a9c62961741ef7dd0127c39f7310dffa0041c80f16f/cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7", size = 3749458 }, + { url = "https://files.pythonhosted.org/packages/ac/25/e715fa0bc24ac2114ed69da33adf451a38abb6f3f24ec207908112e9ba53/cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405", size = 3988220 }, + { url = "https://files.pythonhosted.org/packages/21/ce/b9c9ff56c7164d8e2edfb6c9305045fbc0df4508ccfdb13ee66eb8c95b0e/cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16", size = 3853898 }, + { url = "https://files.pythonhosted.org/packages/2a/33/b3682992ab2e9476b9c81fff22f02c8b0a1e6e1d49ee1750a67d85fd7ed2/cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73", size = 4076592 }, + { url = "https://files.pythonhosted.org/packages/81/1e/ffcc41b3cebd64ca90b28fd58141c5f68c83d48563c88333ab660e002cd3/cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995", size = 2623145 }, + { url = "https://files.pythonhosted.org/packages/87/5c/3dab83cc4aba1f4b0e733e3f0c3e7d4386440d660ba5b1e3ff995feb734d/cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362", size = 3068026 }, + { url = "https://files.pythonhosted.org/packages/6f/db/d8b8a039483f25fc3b70c90bc8f3e1d4497a99358d610c5067bf3bd4f0af/cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c", size = 3144545 }, + { url = "https://files.pythonhosted.org/packages/93/90/116edd5f8ec23b2dc879f7a42443e073cdad22950d3c8ee834e3b8124543/cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3", size = 3679828 }, + { url = "https://files.pythonhosted.org/packages/d8/32/1e1d78b316aa22c0ba6493cc271c1c309969e5aa5c22c830a1d7ce3471e6/cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83", size = 3908132 }, + { url = "https://files.pythonhosted.org/packages/91/bb/cd2c13be3332e7af3cdf16154147952d39075b9f61ea5e6b5241bf4bf436/cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7", size = 2988811 }, + { url = "https://files.pythonhosted.org/packages/cc/fc/ff7c76afdc4f5933b5e99092528d4783d3d1b131960fc8b31eb38e076ca8/cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664", size = 3146844 }, + { url = "https://files.pythonhosted.org/packages/d7/29/a233efb3e98b13d9175dcb3c3146988ec990896c8fa07e8467cce27d5a80/cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08", size = 3681997 }, + { url = "https://files.pythonhosted.org/packages/c0/cf/c9eea7791b961f279fb6db86c3355cfad29a73141f46427af71852b23b95/cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa", size = 3905208 }, + { url = "https://files.pythonhosted.org/packages/21/ea/6c38ca546d5b6dab3874c2b8fc6b1739baac29bacdea31a8c6c0513b3cfa/cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff", size = 2989787 }, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/92/14/1e41f504a246fc224d2ac264c227975427a85caf37c3979979edb9b1b232/Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3", size = 2974416 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/8d/778b7d51b981a96554f29136cd59ca7880bf58094338085bcf2a979a0e6a/Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c", size = 9561 }, +] + +[[package]] +name = "distlib" +version = "0.3.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973 }, +] + +[[package]] +name = "docutils" +version = "0.21.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408 }, +] + +[[package]] +name = "editables" +version = "0.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/37/4a/986d35164e2033ddfb44515168a281a7986e260d344cf369c3f52d4c3275/editables-0.5.tar.gz", hash = "sha256:309627d9b5c4adc0e668d8c6fa7bac1ba7c8c5d415c2d27f60f081f8e80d1de2", size = 14744 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/be/0f2f4a5e8adc114a02b63d92bf8edbfa24db6fc602fca83c885af2479e0e/editables-0.5-py3-none-any.whl", hash = "sha256:61e5ffa82629e0d8bfe09bc44a07db3c1ab8ed1ce78a6980732870f19b5e7d4c", size = 5098 }, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453 }, +] + +[[package]] +name = "execnet" +version = "2.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/ff/b4c0dc78fbe20c3e59c0c7334de0c27eb4001a2b2017999af398bf730817/execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3", size = 166524 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/09/2aea36ff60d16dd8879bdb2f5b3ee0ba8d08cbbdcdfe870e695ce3784385/execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc", size = 40612 }, +] + +[[package]] +name = "filelock" +version = "3.16.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163 }, +] + +[[package]] +name = "flit" +version = "3.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "flit-core" }, + { name = "pip" }, + { name = "requests" }, + { name = "tomli-w" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/03/46/f84b8815d161e7392d124d3de6e5880d1d36a74162a77a5e2839dc3c8c68/flit-3.10.1.tar.gz", hash = "sha256:9c6258ae76d218ce60f9e39a43ca42006a3abcc5c44ea6bb2a1daa13857a8f1a", size = 143162 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/ba/d88b8f3253b4af5a88559aede6345975cc2b18ed77bf8daf977bbb9df2c5/flit-3.10.1-py3-none-any.whl", hash = "sha256:d79c19c2caae73cc486d3d827af6a11c1a84b9efdfab8d9683b714ec8d1dc1f1", size = 50683 }, +] + +[[package]] +name = "flit-core" +version = "3.10.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d5/ae/09427bea9227a33ec834ed5461432752fd5d02b14f93dd68406c91684622/flit_core-3.10.1.tar.gz", hash = "sha256:66e5b87874a0d6e39691f0e22f09306736b633548670ad3c09ec9db03c5662f7", size = 42842 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/2d/293fe6a58e73df57cc2b5e5cf2b17c6bb4fb5b0c390bab8f1e87bdc62529/flit_core-3.10.1-py3-none-any.whl", hash = "sha256:cb31a76e8b31ad3351bb89e531f64ef2b05d1e65bd939183250bf81ddf4922a8", size = 36389 }, +] + +[[package]] +name = "gitdb" +version = "4.0.11" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "smmap" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/0d/bbb5b5ee188dec84647a4664f3e11b06ade2bde568dbd489d9d64adef8ed/gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b", size = 394469 } +wheels = [ + 
{ url = "https://files.pythonhosted.org/packages/fd/5b/8f0c4a5bb9fd491c277c21eff7ccae71b47d43c4446c9d0c6cff2fe8c2c4/gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4", size = 62721 }, +] + +[[package]] +name = "gitpython" +version = "3.1.43" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "gitdb" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b6/a1/106fd9fa2dd989b6fb36e5893961f82992cf676381707253e0bf93eb1662/GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c", size = 214149 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/bd/cc3a402a6439c15c3d4294333e13042b915bbeab54edc457c723931fed3f/GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff", size = 207337 }, +] + +[[package]] +name = "google-api-core" +version = "2.22.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "googleapis-common-protos" }, + { name = "proto-plus" }, + { name = "protobuf" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/00/c2/425f97c2087affbd452a05d3faa08d97de333f2ca554733e1becab55ee4e/google_api_core-2.22.0.tar.gz", hash = "sha256:26f8d76b96477db42b55fd02a33aae4a42ec8b86b98b94969b7333a2c828bf35", size = 159700 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/7b/1137a9811be73d8ff8238eb2d9f60f0bc0bb6a1edd87f9d47557ab937a2b/google_api_core-2.22.0-py3-none-any.whl", hash = "sha256:a6652b6bd51303902494998626653671703c420f6f4c88cfd3f50ed723e9d021", size = 156538 }, +] + +[[package]] +name = "google-api-python-client" +version = "2.151.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, + { name = "google-auth-httplib2" }, + { name = "httplib2" }, + { name = "uritemplate" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7c/87/5a753c932a962f1ac72403608b6840500187fd9d856127a360b7a30c59ec/google_api_python_client-2.151.0.tar.gz", hash = "sha256:a9d26d630810ed4631aea21d1de3e42072f98240aaf184a8a1a874a371115034", size = 12030480 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/75/32/675ec68ed1bd27664d74f980cd262504603da0b683c2dd09c8725f576236/google_api_python_client-2.151.0-py2.py3-none-any.whl", hash = "sha256:4427b2f47cd88b0355d540c2c52215f68c337f3bc9d6aae1ceeae4525977504c", size = 12534219 }, +] + +[[package]] +name = "google-auth" +version = "2.35.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachetools" }, + { name = "pyasn1-modules" }, + { name = "rsa" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/37/c854a8b1b1020cf042db3d67577c6f84cd1e8ff6515e4f5498ae9e444ea5/google_auth-2.35.0.tar.gz", hash = "sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a", size = 267223 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/1f/3a72917afcb0d5cd842cbccb81bf7a8a7b45b4c66d8dc4556ccb3b016bfc/google_auth-2.35.0-py2.py3-none-any.whl", hash = "sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f", size = 208968 }, +] + +[[package]] +name = "google-auth-httplib2" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "httplib2" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/56/be/217a598a818567b28e859ff087f347475c807a5649296fb5a817c58dacef/google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05", size = 10842 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/8a/fe34d2f3f9470a27b01c9e76226965863f153d5fbe276f83608562e49c04/google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d", size = 9253 }, +] + +[[package]] +name = "google-auth-oauthlib" +version = "1.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "requests-oauthlib" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/0f/1772edb8d75ecf6280f1c7f51cbcebe274e8b17878b382f63738fd96cee5/google_auth_oauthlib-1.2.1.tar.gz", hash = "sha256:afd0cad092a2eaa53cd8e8298557d6de1034c6cb4a740500b5357b648af97263", size = 24970 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/8e/22a28dfbd218033e4eeaf3a0533b2b54852b6530da0c0fe934f0cc494b29/google_auth_oauthlib-1.2.1-py2.py3-none-any.whl", hash = "sha256:2d58a27262d55aa1b87678c3ba7142a080098cbc2024f903c62355deb235d91f", size = 24930 }, +] + +[[package]] +name = "googleapis-common-protos" +version = "1.65.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/53/3b/1599ceafa875ffb951480c8c74f4b77646a6b80e80970698f2aa93c216ce/googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0", size = 113657 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/08/49bfe7cf737952cc1a9c43e80cc258ed45dad7f183c5b8276fc94cb3862d/googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63", size = 220890 }, +] + +[[package]] +name = "h11" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 }, +] + +[[package]] +name = "hatch" +version = "1.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "hatchling" }, + { name = "httpx" }, + { name = "hyperlink" }, + { name = "keyring" }, + { name = "packaging" }, + { name = "pexpect" }, + { name = "platformdirs" }, + { name = "rich" }, + { name = "shellingham" }, + { name = "tomli-w" }, + { name = "tomlkit" }, + { name = "userpath" }, + { name = "virtualenv" }, + { name = "zstandard" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/17/98/63bf6c592b65f67201db292489053b86310cfb107eb095d345398e00cbd3/hatch-1.9.4.tar.gz", hash = "sha256:9bb7d1c4a7a51cc1f9e16394875c940b45fa84b698f0291529316b27d74e7f32", size = 689598 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/38/ba8f90264d19ed39851f37a22f2a4be8e9644a1203f114b16647f954bb02/hatch-1.9.4-py3-none-any.whl", hash = 
"sha256:461eb86b4b46249e38a9a621c7239e61285fd8e14b5a1b5a727c394893a25300", size = 110812 }, +] + +[[package]] +name = "hatchling" +version = "1.21.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "editables" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = "pluggy" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "trove-classifiers" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d8/a1/7dd1caa87c0b15c04c6291e25112e5d082cce02ee87f221a8be1d594f857/hatchling-1.21.1.tar.gz", hash = "sha256:bba440453a224e7d4478457fa2e8d8c3633765bafa02975a6b53b9bf917980bc", size = 58059 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/bb/40528a09a33845bd7fd75c33b3be7faec3b5c8f15f68a58931da67420fb9/hatchling-1.21.1-py3-none-any.whl", hash = "sha256:21e8c13f8458b219a91cb84e5b61c15bf786695d1c4fabc29e91e78f94bfe892", size = 76740 }, +] + +[[package]] +name = "httpcore" +version = "1.0.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b6/44/ed0fa6a17845fb033bd885c03e842f08c1b9406c86a2e60ac1ae1b9206a6/httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f", size = 85180 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/06/89/b161908e2f51be56568184aeb4a880fd287178d176fd1c860d2217f41106/httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f", size = 78011 }, +] + +[[package]] +name = "httplib2" +version = "0.22.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyparsing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/ad/2371116b22d616c194aa25ec410c9c6c37f23599dcd590502b74db197584/httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81", size = 351116 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/6c/d2fbdaaa5959339d53ba38e94c123e4e84b8fbc4b84beb0e70d7c1608486/httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc", size = 96854 }, +] + +[[package]] +name = "httpx" +version = "0.27.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, + { name = "sniffio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/82/08f8c936781f67d9e6b9eeb8a0c8b4e406136ea4c3d1f89a5db71d42e0e6/httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2", size = 144189 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/95/9377bcb415797e44274b51d46e3249eba641711cf3348050f76ee7b15ffc/httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0", size = 76395 }, +] + +[[package]] +name = "hyperlink" +version = "21.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3a/51/1947bd81d75af87e3bb9e34593a4cf118115a8feb451ce7a69044ef1412e/hyperlink-21.0.0.tar.gz", hash = "sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b", size = 140743 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/6e/aa/8caf6a0a3e62863cbb9dab27135660acba46903b703e224f14f447e57934/hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4", size = 74638 }, +] + +[[package]] +name = "identify" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/29/bb/25024dbcc93516c492b75919e76f389bac754a3e4248682fba32b250c880/identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98", size = 99097 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7d/0c/4ef72754c050979fdcc06c744715ae70ea37e734816bb6514f79df77a42f/identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0", size = 98972 }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "importlib-metadata" +version = "8.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cd/12/33e59336dca5be0c398a7482335911a33aa0e20776128f038019f1a95f1b/importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7", size = 55304 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/d9/a1e041c5e7caa9a05c925f4bdbdfb7f006d1f74996af53467bc394c97be7/importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b", size = 26514 }, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, +] + +[[package]] +name = "inputimeout" +version = "1.0.4" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/97/9c/1646ca469bc2dc299ac393c8d31136c6c22a35ca1e373fa462ac01100d37/inputimeout-1.0.4-py3-none-any.whl", hash = "sha256:f4e23d27753cfc25268eefc8d52a3edc46280ad831d226617c51882423475a43", size = 4639 }, +] + +[[package]] +name = "jaraco-classes" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "more-itertools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/c0/ed4a27bc5571b99e3cff68f8a9fa5b56ff7df1c2251cc715a652ddd26402/jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd", size = 
11780 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/66/b15ce62552d84bbfcec9a4873ab79d993a1dd4edb922cbfccae192bd5b5f/jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790", size = 6777 }, +] + +[[package]] +name = "jaraco-context" +version = "6.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-tarfile", marker = "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/ad/f3777b81bf0b6e7bc7514a1656d3e637b2e8e15fab2ce3235730b3e7a4e6/jaraco_context-6.0.1.tar.gz", hash = "sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3", size = 13912 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/db/0c52c4cf5e4bd9f5d7135ec7669a3a767af21b3a308e1ed3674881e52b62/jaraco.context-6.0.1-py3-none-any.whl", hash = "sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4", size = 6825 }, +] + +[[package]] +name = "jaraco-functools" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "more-itertools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ab/23/9894b3df5d0a6eb44611c36aec777823fc2e07740dabbd0b810e19594013/jaraco_functools-4.1.0.tar.gz", hash = "sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d", size = 19159 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/4f/24b319316142c44283d7540e76c7b5a6dbd5db623abd86bb7b3491c21018/jaraco.functools-4.1.0-py3-none-any.whl", hash = "sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649", size = 10187 }, +] + +[[package]] +name = "jeepney" +version = "0.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/f4/154cf374c2daf2020e05c3c6a03c91348d59b23c5366e968feb198306fdf/jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806", size = 106005 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/72/2a1e2290f1ab1e06f71f3d0f1646c9e4634e70e1d37491535e19266e8dc9/jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755", size = 48435 }, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/55/39036716d19cab0747a5020fc7e907f362fbf48c984b14e62127f7e68e5d/jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369", size = 240245 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d", size = 133271 }, +] + +[[package]] +name = "jsonschema" +version = "4.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/2e/03362ee4034a4c917f697890ccd4aec0800ccf9ded7f511971c75451deec/jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4", size = 325778 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/69/4a/4f9dbeb84e8850557c02365a0eee0649abe5eb1d84af92a25731c6c0f922/jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566", size = 88462 }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2024.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/10/db/58f950c996c793472e336ff3655b13fbcf1e3b359dcf52dcf3ed3b52c352/jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272", size = 15561 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/0f/8910b19ac0670a0f80ce1008e5e751c4a57e14d2c4c13a482aa6079fa9d6/jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf", size = 18459 }, +] + +[[package]] +name = "keyring" +version = "25.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata", marker = "python_full_version < '3.12'" }, + { name = "jaraco-classes" }, + { name = "jaraco-context" }, + { name = "jaraco-functools" }, + { name = "jeepney", marker = "sys_platform == 'linux'" }, + { name = "pywin32-ctypes", marker = "sys_platform == 'win32'" }, + { name = "secretstorage", marker = "sys_platform == 'linux'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f6/24/64447b13df6a0e2797b586dad715766d756c932ce8ace7f67bd384d76ae0/keyring-25.5.0.tar.gz", hash = "sha256:4c753b3ec91717fe713c4edd522d625889d8973a349b0e582622f49766de58e6", size = 62675 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/c9/353c156fa2f057e669106e5d6bcdecf85ef8d3536ce68ca96f18dc7b6d6f/keyring-25.5.0-py3-none-any.whl", hash = "sha256:e67f8ac32b04be4714b42fe84ce7dad9c40985b9ca827c592cc303e7c26d9741", size = 39096 }, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357 }, + { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393 
}, + { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732 }, + { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866 }, + { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964 }, + { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977 }, + { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366 }, + { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091 }, + { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065 }, + { url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514 }, + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353 }, + { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392 }, + { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984 }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120 }, + { url = 
"https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032 }, + { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057 }, + { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359 }, + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306 }, + { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094 }, + { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521 }, + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", 
hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 }, + { url = 
"https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 }, + { url = "https://files.pythonhosted.org/packages/a7/ea/9b1530c3fdeeca613faeb0fb5cbcf2389d816072fab72a71b45749ef6062/MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a", size = 14344 }, + { url = "https://files.pythonhosted.org/packages/4b/c2/fbdbfe48848e7112ab05e627e718e854d20192b674952d9042ebd8c9e5de/MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff", size = 12389 }, + { url = "https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", size = 21607 }, + { url = "https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", size = 20728 }, + { url = "https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", size = 20826 }, + { url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", size = 21843 }, + { url = "https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", size = 21219 }, + { url = "https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", size = 20946 }, + { url = "https://files.pythonhosted.org/packages/7c/31/bd635fb5989440d9365c5e3c47556cfea121c7803f5034ac843e8f37c2f2/MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f", size = 15063 }, + { url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", size = 15506 }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, +] + +[[package]] +name = "more-itertools" +version = "10.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/51/78/65922308c4248e0eb08ebcbe67c95d48615cc6f27854b6f2e57143e9178f/more-itertools-10.5.0.tar.gz", hash = "sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6", size = 121020 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/7e/3a64597054a70f7c86eb0a7d4fc315b8c1ab932f64883a297bdffeb5f967/more_itertools-10.5.0-py3-none-any.whl", hash = "sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef", size = 
60952 }, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, +] + +[[package]] +name = "nh3" +version = "0.2.18" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/62/73/10df50b42ddb547a907deeb2f3c9823022580a7a47281e8eae8e003a9639/nh3-0.2.18.tar.gz", hash = "sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4", size = 15028 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/89/1daff5d9ba5a95a157c092c7c5f39b8dd2b1ddb4559966f808d31cfb67e0/nh3-0.2.18-cp37-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86", size = 1374474 }, + { url = "https://files.pythonhosted.org/packages/2c/b6/42fc3c69cabf86b6b81e4c051a9b6e249c5ba9f8155590222c2622961f58/nh3-0.2.18-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811", size = 694573 }, + { url = "https://files.pythonhosted.org/packages/45/b9/833f385403abaf0023c6547389ec7a7acf141ddd9d1f21573723a6eab39a/nh3-0.2.18-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200", size = 844082 }, + { url = "https://files.pythonhosted.org/packages/05/2b/85977d9e11713b5747595ee61f381bc820749daf83f07b90b6c9964cf932/nh3-0.2.18-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164", size = 782460 }, + { url = "https://files.pythonhosted.org/packages/72/f2/5c894d5265ab80a97c68ca36f25c8f6f0308abac649aaf152b74e7e854a8/nh3-0.2.18-cp37-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189", size = 879827 }, + { url = "https://files.pythonhosted.org/packages/ab/a7/375afcc710dbe2d64cfbd69e31f82f3e423d43737258af01f6a56d844085/nh3-0.2.18-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad", size = 841080 }, + { url = "https://files.pythonhosted.org/packages/c2/a8/3bb02d0c60a03ad3a112b76c46971e9480efa98a8946677b5a59f60130ca/nh3-0.2.18-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b", size = 924144 }, + { url = "https://files.pythonhosted.org/packages/1b/63/6ab90d0e5225ab9780f6c9fb52254fa36b52bb7c188df9201d05b647e5e1/nh3-0.2.18-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307", size = 769192 }, + { url = "https://files.pythonhosted.org/packages/a4/17/59391c28580e2c32272761629893e761442fc7666da0b1cdb479f3b67b88/nh3-0.2.18-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f", 
size = 791042 }, + { url = "https://files.pythonhosted.org/packages/a3/da/0c4e282bc3cff4a0adf37005fa1fb42257673fbc1bbf7d1ff639ec3d255a/nh3-0.2.18-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe", size = 1010073 }, + { url = "https://files.pythonhosted.org/packages/de/81/c291231463d21da5f8bba82c8167a6d6893cc5419b0639801ee5d3aeb8a9/nh3-0.2.18-cp37-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a", size = 1029782 }, + { url = "https://files.pythonhosted.org/packages/63/1d/842fed85cf66c973be0aed8770093d6a04741f65e2c388ddd4c07fd3296e/nh3-0.2.18-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50", size = 942504 }, + { url = "https://files.pythonhosted.org/packages/eb/61/73a007c74c37895fdf66e0edcd881f5eaa17a348ff02f4bb4bc906d61085/nh3-0.2.18-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204", size = 941541 }, + { url = "https://files.pythonhosted.org/packages/78/48/54a788fc9428e481b2f58e0cd8564f6c74ffb6e9ef73d39e8acbeae8c629/nh3-0.2.18-cp37-abi3-win32.whl", hash = "sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be", size = 573750 }, + { url = "https://files.pythonhosted.org/packages/26/8d/53c5b19c4999bdc6ba95f246f4ef35ca83d7d7423e5e38be43ad66544e5d/nh3-0.2.18-cp37-abi3-win_amd64.whl", hash = "sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844", size = 579012 }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, +] + +[[package]] +name = "oauthlib" +version = "3.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/fa/fbf4001037904031639e6bfbfc02badfc7e12f137a8afa254df6c4c8a670/oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918", size = 177352 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/80/cab10959dc1faead58dc8384a781dfbf93cb4d33d50988f7a69f1b7c9bbe/oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca", size = 151688 }, +] + +[[package]] +name = "packaging" +version = "24.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/51/65/50db4dda066951078f0a96cf12f4b9ada6e4b811516bf0262c0f4f7064d4/packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002", size = 148788 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/aa/cc0199a5f0ad350994d660967a8efb233fe0416e4639146c089643407ce6/packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124", size = 53985 }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url 
= "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 }, +] + +[[package]] +name = "pexpect" +version = "4.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ptyprocess" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772 }, +] + +[[package]] +name = "pip" +version = "24.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f4/b1/b422acd212ad7eedddaf7981eee6e5de085154ff726459cf2da7c5a184c1/pip-24.3.1.tar.gz", hash = "sha256:ebcb60557f2aefabc2e0f918751cd24ea0d56d8ec5445fe1807f1d2109660b99", size = 1931073 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/7d/500c9ad20238fcfcb4cb9243eede163594d7020ce87bd9610c9e02771876/pip-24.3.1-py3-none-any.whl", hash = "sha256:3790624780082365f47549d032f3770eeb2b1e8bd1f7b2e02dace1afa361b4ed", size = 1822182 }, +] + +[[package]] +name = "pipx" +version = "1.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "argcomplete" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "packaging" }, + { name = "platformdirs" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "userpath" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/17/21/dd6b9a9c4f0cb659ce3dad991f0e8dde852b2c81922224ef77df4222ab7a/pipx-1.7.1.tar.gz", hash = "sha256:762de134e16a462be92645166d225ecef446afaef534917f5f70008d63584360", size = 291889 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/af/66db02a214590a841bcd1df1f02f7ef818dc3f43487acddab0b8c40b25d2/pipx-1.7.1-py3-none-any.whl", hash = "sha256:3933c43bb344e649cb28e10d357e0967ce8572f1c19caf90cf39ae95c2a0afaf", size = 78749 }, +] + +[[package]] +name = "pkginfo" +version = "1.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/72/347ec5be4adc85c182ed2823d8d1c7b51e13b9a6b0c1aae59582eca652df/pkginfo-1.10.0.tar.gz", hash = "sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297", size = 378457 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/09/054aea9b7534a15ad38a363a2bd974c20646ab1582a387a95b8df1bfea1c/pkginfo-1.10.0-py3-none-any.whl", hash = "sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097", size = 30392 }, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = 
"sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 }, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, +] + +[[package]] +name = "pre-commit" +version = "4.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv" }, + { name = "identify" }, + { name = "nodeenv" }, + { name = "pyyaml" }, + { name = "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2e/c8/e22c292035f1bac8b9f5237a2622305bc0304e776080b246f3df57c4ff9f/pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2", size = 191678 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/8f/496e10d51edd6671ebe0432e33ff800aa86775d2d147ce7d43389324a525/pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878", size = 218713 }, +] + +[[package]] +name = "pre-commit-uv" +version = "4.1.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pre-commit" }, + { name = "uv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/6c/c3c1d01698c8abb0b546defc0304971fa7fb2ba84ad35587b9dad095d73f/pre_commit_uv-4.1.4.tar.gz", hash = "sha256:3db606a79b226127b27dbbd8381b78c0e30de3ac775a8492c576a68e9250535c", size = 6493 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f1/70/1b65f9118ef64f6ffe5d57a67170bbff25d4f4a3d1cb78e8ed3392e16114/pre_commit_uv-4.1.4-py3-none-any.whl", hash = "sha256:7f01fb494fa1caa5097d20a38f71df7cea0209197b2564699cef9b3f3aa9d135", size = 5578 }, +] + +[[package]] +name = "proto-plus" +version = "1.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7e/05/74417b2061e1bf1b82776037cad97094228fa1c1b6e82d08a78d3fb6ddb6/proto_plus-1.25.0.tar.gz", hash = "sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91", size = 56124 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dd/25/0b7cc838ae3d76d46539020ec39fc92bfc9acc29367e58fe912702c2a79e/proto_plus-1.25.0-py3-none-any.whl", hash = "sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961", size = 50126 }, +] + +[[package]] +name = "protobuf" +version = "5.28.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/74/6e/e69eb906fddcb38f8530a12f4b410699972ab7ced4e21524ece9d546ac27/protobuf-5.28.3.tar.gz", hash = "sha256:64badbc49180a5e401f373f9ce7ab1d18b63f7dd4a9cdc43c92b9f0b481cef7b", size = 422479 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d1/c5/05163fad52d7c43e124a545f1372d18266db36036377ad29de4271134a6a/protobuf-5.28.3-cp310-abi3-win32.whl", hash = "sha256:0c4eec6f987338617072592b97943fdbe30d019c56126493111cf24344c1cc24", size = 419624 }, + { url = "https://files.pythonhosted.org/packages/9c/4c/4563ebe001ff30dca9d7ed12e471fa098d9759712980cde1fd03a3a44fb7/protobuf-5.28.3-cp310-abi3-win_amd64.whl", hash = "sha256:91fba8f445723fcf400fdbe9ca796b19d3b1242cd873907979b9ed71e4afe868", size = 431464 }, + { url = "https://files.pythonhosted.org/packages/1c/f2/baf397f3dd1d3e4af7e3f5a0382b868d25ac068eefe1ebde05132333436c/protobuf-5.28.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a3f6857551e53ce35e60b403b8a27b0295f7d6eb63d10484f12bc6879c715687", size = 414743 }, + { url = "https://files.pythonhosted.org/packages/85/50/cd61a358ba1601f40e7d38bcfba22e053f40ef2c50d55b55926aecc8fec7/protobuf-5.28.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:3fa2de6b8b29d12c61911505d893afe7320ce7ccba4df913e2971461fa36d584", size = 316511 }, + { url = "https://files.pythonhosted.org/packages/5d/ae/3257b09328c0b4e59535e497b0c7537d4954038bdd53a2f0d2f49d15a7c4/protobuf-5.28.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:712319fbdddb46f21abb66cd33cb9e491a5763b2febd8f228251add221981135", size = 316624 }, + { url = "https://files.pythonhosted.org/packages/57/b5/ee3d918f536168def73b3f49edeba065429ab3a7e7b033d33e69c46ddff9/protobuf-5.28.3-cp39-cp39-win32.whl", hash = "sha256:135658402f71bbd49500322c0f736145731b16fc79dc8f367ab544a17eab4535", size = 419648 }, + { url = "https://files.pythonhosted.org/packages/53/54/e1bdf6f1d29828ddb6aca0a83bf208ab1d5f88126f34e17e487b2cd20d93/protobuf-5.28.3-cp39-cp39-win_amd64.whl", hash = "sha256:70585a70fc2dd4818c51287ceef5bdba6387f88a578c86d47bb34669b5552c36", size = 431591 }, + { url = "https://files.pythonhosted.org/packages/ad/c3/2377c159e28ea89a91cf1ca223f827ae8deccb2c9c401e5ca233cd73002f/protobuf-5.28.3-py3-none-any.whl", hash = "sha256:cee1757663fa32a1ee673434fcf3bf24dd54763c79690201208bafec62f19eed", size = 169511 }, +] + +[[package]] +name = "psutil" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/10/2a30b13c61e7cf937f4adf90710776b7918ed0a9c434e2c38224732af310/psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a", size = 508565 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/9e/8be43078a171381953cfee33c07c0d628594b5dbfc5157847b85022c2c1b/psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688", size = 247762 }, + { url = "https://files.pythonhosted.org/packages/1d/cb/313e80644ea407f04f6602a9e23096540d9dc1878755f3952ea8d3d104be/psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e", size = 248777 }, + { url = "https://files.pythonhosted.org/packages/65/8e/bcbe2025c587b5d703369b6a75b65d41d1367553da6e3f788aff91eaf5bd/psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38", size = 284259 }, + { url = "https://files.pythonhosted.org/packages/58/4d/8245e6f76a93c98aab285a43ea71ff1b171bcd90c9d238bf81f7021fb233/psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b", size = 287255 }, + { url = "https://files.pythonhosted.org/packages/27/c2/d034856ac47e3b3cdfa9720d0e113902e615f4190d5d1bdb8df4b2015fb2/psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a", size = 288804 }, + { url = "https://files.pythonhosted.org/packages/ea/55/5389ed243c878725feffc0d6a3bc5ef6764312b6fc7c081faaa2cfa7ef37/psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e", size = 250386 }, + { url = "https://files.pythonhosted.org/packages/11/91/87fa6f060e649b1e1a7b19a4f5869709fbf750b7c8c262ee776ec32f3028/psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be", size = 254228 }, +] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993 }, +] + +[[package]] +name = "pyasn1" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135 }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1d/67/6afbf0d507f73c32d21084a79946bfcfca5fbc62a72057e9c23797a737c9/pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c", size = 310028 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/89/bc88a6711935ba795a679ea6ebee07e128050d6382eaa35a0a47c8032bdc/pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd", size = 181537 }, +] + +[[package]] +name = "pycparser" +version = "2.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, +] + +[[package]] +name = "pygithub" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { 
name = "deprecated" }, + { name = "pyjwt", extra = ["crypto"] }, + { name = "pynacl" }, + { name = "requests" }, + { name = "typing-extensions" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f1/a0/1e8b8ca88df9857836f5bf8e3ee15dfb810d19814ef700b12f99ce11f691/pygithub-2.4.0.tar.gz", hash = "sha256:6601e22627e87bac192f1e2e39c6e6f69a43152cfb8f307cee575879320b3051", size = 3476673 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/f3/e185613c411757c0c18b904ea2db173f2872397eddf444a3fe8cdde47077/PyGithub-2.4.0-py3-none-any.whl", hash = "sha256:81935aa4bdc939fba98fee1cb47422c09157c56a27966476ff92775602b9ee24", size = 362599 }, +] + +[[package]] +name = "pygments" +version = "2.18.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/62/8336eff65bcbc8e4cb5d05b55faf041285951b6e80f33e2bff2024788f31/pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", size = 4891905 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/3f/01c8b82017c199075f8f788d0d906b9ffbbc5a47dc9918a945e13d5a2bda/pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a", size = 1205513 }, +] + +[[package]] +name = "pyjwt" +version = "2.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/68/ce067f09fca4abeca8771fe667d89cc347d1e99da3e093112ac329c6020e/pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c", size = 78825 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/84/0fdf9b18ba31d69877bd39c9cd6052b47f3761e9910c15de788e519f079f/PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850", size = 22344 }, +] + +[package.optional-dependencies] +crypto = [ + { name = "cryptography" }, +] + +[[package]] +name = "pynacl" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a7/22/27582568be639dfe22ddb3902225f91f2f17ceff88ce80e4db396c8986da/PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba", size = 3392854 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/75/0b8ede18506041c0bf23ac4d8e2971b4161cd6ce630b177d0a08eb0d8857/PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1", size = 349920 }, + { url = "https://files.pythonhosted.org/packages/59/bb/fddf10acd09637327a97ef89d2a9d621328850a72f1fdc8c08bdf72e385f/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92", size = 601722 }, + { url = "https://files.pythonhosted.org/packages/5d/70/87a065c37cca41a75f2ce113a5a2c2aa7533be648b184ade58971b5f7ccc/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394", size = 680087 }, + { url = "https://files.pythonhosted.org/packages/ee/87/f1bb6a595f14a327e8285b9eb54d41fef76c585a0edef0a45f6fc95de125/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d", size = 856678 }, + { url = "https://files.pythonhosted.org/packages/66/28/ca86676b69bf9f90e710571b67450508484388bfce09acf8a46f0b8c785f/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858", size = 1133660 }, + { url = "https://files.pythonhosted.org/packages/3d/85/c262db650e86812585e2bc59e497a8f59948a005325a11bbbc9ecd3fe26b/PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b", size = 663824 }, + { url = "https://files.pythonhosted.org/packages/fd/1a/cc308a884bd299b651f1633acb978e8596c71c33ca85e9dc9fa33a5399b9/PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff", size = 1117912 }, + { url = "https://files.pythonhosted.org/packages/25/2d/b7df6ddb0c2a33afdb358f8af6ea3b8c4d1196ca45497dd37a56f0c122be/PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543", size = 204624 }, + { url = "https://files.pythonhosted.org/packages/5e/22/d3db169895faaf3e2eda892f005f433a62db2decbcfbc2f61e6517adfa87/PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93", size = 212141 }, +] + +[[package]] +name = "pyparsing" +version = "3.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/d5/e5aeee5387091148a19e1145f63606619cb5f20b83fccb63efae6474e7b2/pyparsing-3.2.0.tar.gz", hash = "sha256:cbf74e27246d595d9a74b186b810f6fbb86726dbf3b9532efb343f6d7294fe9c", size = 920984 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/ec/2eb3cd785efd67806c46c13a17339708ddc346cbb684eade7a6e6f79536a/pyparsing-3.2.0-py3-none-any.whl", hash = "sha256:93d9577b88da0bbea8cc8334ee8b918ed014968fd2ec383e868fb8afb1ccef84", size = 106921 }, +] + +[[package]] +name = "pytest" +version = "8.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/6c/62bbd536103af674e227c41a8f3dcd022d591f6eed5facb5a0f31ee33bbc/pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181", size = 1442487 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2", size = 342341 }, +] + +[[package]] +name = "pytest-xdist" +version = "3.6.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "execnet" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/41/c4/3c310a19bc1f1e9ef50075582652673ef2bfc8cd62afef9585683821902f/pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d", size = 84060 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/82/1d96bf03ee4c0fdc3c0cbe61470070e659ca78dc0086fb88b66c185e2449/pytest_xdist-3.6.1-py3-none-any.whl", hash = 
"sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7", size = 46108 }, +] + +[[package]] +name = "pywin32-ctypes" +version = "0.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/85/9f/01a1a99704853cb63f253eea009390c88e7131c67e66a0a02099a8c917cb/pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755", size = 29471 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/3d/8161f7711c017e01ac9f008dfddd9410dff3674334c233bde66e7ba65bbf/pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8", size = 30756 }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199 }, + { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758 }, + { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463 }, + { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280 }, + { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239 }, + { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802 }, + { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527 }, + { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052 }, + { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774 }, + { url = 
"https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612 }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040 }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829 }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167 }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952 }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301 }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638 }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850 }, + { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980 }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 
766223 }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, + 
{ url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, + { url = "https://files.pythonhosted.org/packages/65/d8/b7a1db13636d7fb7d4ff431593c510c8b8fca920ade06ca8ef20015493c5/PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d", size = 184777 }, + { url = "https://files.pythonhosted.org/packages/0a/02/6ec546cd45143fdf9840b2c6be8d875116a64076218b61d68e12548e5839/PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f", size = 172318 }, + { url = "https://files.pythonhosted.org/packages/0e/9a/8cc68be846c972bda34f6c2a93abb644fb2476f4dcc924d52175786932c9/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290", size = 720891 }, + { url = "https://files.pythonhosted.org/packages/e9/6c/6e1b7f40181bc4805e2e07f4abc10a88ce4648e7e95ff1abe4ae4014a9b2/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12", size = 722614 }, + { url = "https://files.pythonhosted.org/packages/3d/32/e7bd8535d22ea2874cef6a81021ba019474ace0d13a4819c2a4bce79bd6a/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19", size = 737360 }, + { url = "https://files.pythonhosted.org/packages/d7/12/7322c1e30b9be969670b672573d45479edef72c9a0deac3bb2868f5d7469/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e", size = 699006 }, + { url = "https://files.pythonhosted.org/packages/82/72/04fcad41ca56491995076630c3ec1e834be241664c0c09a64c9a2589b507/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725", size = 723577 }, + { url = "https://files.pythonhosted.org/packages/ed/5e/46168b1f2757f1fcd442bc3029cd8767d88a98c9c05770d8b420948743bb/PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631", size = 144593 }, + { url = "https://files.pythonhosted.org/packages/19/87/5124b1c1f2412bb95c59ec481eaf936cd32f0fe2a7b16b97b81c4c017a6a/PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", size = 162312 }, +] + +[[package]] +name = "readme-renderer" +version = "44.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "nh3" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5a/a9/104ec9234c8448c4379768221ea6df01260cd6c2ce13182d4eac531c8342/readme_renderer-44.0.tar.gz", hash = "sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1", size = 32056 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/67/921ec3024056483db83953ae8e48079ad62b92db7880013ca77632921dd0/readme_renderer-44.0-py3-none-any.whl", hash = "sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151", size = 13310 }, +] + +[[package]] +name = "referencing" +version = "0.35.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { 
name = "attrs" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/5b/73ca1f8e72fff6fa52119dbd185f73a907b1989428917b24cff660129b6d/referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c", size = 62991 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/59/2056f61236782a2c86b33906c025d4f4a0b17be0161b63b70fd9e8775d36/referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de", size = 26684 }, +] + +[[package]] +name = "requests" +version = "2.32.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, +] + +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "oauthlib" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/f2/05f29bc3913aea15eb670be136045bf5c5bbf4b99ecb839da9b422bb2c85/requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9", size = 55650 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36", size = 24179 }, +] + +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/61/d7545dafb7ac2230c70d38d31cbfe4cc64f7144dc41f6e4e4b78ecd9f5bb/requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6", size = 206888 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06", size = 54481 }, +] + +[[package]] +name = "rfc3986" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/85/40/1520d68bfa07ab5a6f065a186815fb6610c86fe957bc065754e47f7b0840/rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c", size = 49026 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/9a/9afaade874b2fa6c752c36f1548f718b5b83af81ed9b76628329dab81c1b/rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd", size = 31326 }, +] + +[[package]] +name = "rich" +version = "13.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, + { name = "typing-extensions", marker = 
"python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ab/3a/0316b28d0761c6734d6bc14e770d85506c986c85ffb239e688eeaab2c2bc/rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098", size = 223149 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/19/71/39c7c0d87f8d4e6c020a393182060eaefeeae6c01dab6a84ec346f2567df/rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90", size = 242424 }, +] + +[[package]] +name = "rich-click" +version = "1.8.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "rich" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3a/a9/a1f1af87e83832d794342fbc09c96cc7cd6798b8dfb8adfbe6ccbef8d70c/rich_click-1.8.3.tar.gz", hash = "sha256:6d75bdfa7aa9ed2c467789a0688bc6da23fbe3a143e19aa6ad3f8bac113d2ab3", size = 38209 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/ea/5a0c5a8e6532e971983d1b0fc99268eb66a10f489da35d9022ce01044191/rich_click-1.8.3-py3-none-any.whl", hash = "sha256:636d9c040d31c5eee242201b5bf4f2d358bfae4db14bb22ec1cafa717cfd02cd", size = 35032 }, +] + +[[package]] +name = "rpds-py" +version = "0.20.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/25/cb/8e919951f55d109d658f81c9b49d0cc3b48637c50792c5d2e77032b8c5da/rpds_py-0.20.1.tar.gz", hash = "sha256:e1791c4aabd117653530dccd24108fa03cc6baf21f58b950d0a73c3b3b29a350", size = 25931 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/0e/d7e7e9280988a7bc56fd326042baca27f4f55fad27dc8aa64e5e0e894e5d/rpds_py-0.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a649dfd735fff086e8a9d0503a9f0c7d01b7912a333c7ae77e1515c08c146dad", size = 327335 }, + { url = "https://files.pythonhosted.org/packages/4c/72/027185f213d53ae66765c575229829b202fbacf3d55fe2bd9ff4e29bb157/rpds_py-0.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f16bc1334853e91ddaaa1217045dd7be166170beec337576818461268a3de67f", size = 318250 }, + { url = "https://files.pythonhosted.org/packages/2b/e7/b4eb3e6ff541c83d3b46f45f855547e412ab60c45bef64520fafb00b9b42/rpds_py-0.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14511a539afee6f9ab492b543060c7491c99924314977a55c98bfa2ee29ce78c", size = 361206 }, + { url = "https://files.pythonhosted.org/packages/e7/80/cb9a4b4cad31bcaa37f38dae7a8be861f767eb2ca4f07a146b5ffcfbee09/rpds_py-0.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ccb8ac2d3c71cda472b75af42818981bdacf48d2e21c36331b50b4f16930163", size = 369921 }, + { url = "https://files.pythonhosted.org/packages/95/1b/463b11e7039e18f9e778568dbf7338c29bbc1f8996381115201c668eb8c8/rpds_py-0.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c142b88039b92e7e0cb2552e8967077e3179b22359e945574f5e2764c3953dcf", size = 403673 }, + { url = "https://files.pythonhosted.org/packages/86/98/1ef4028e9d5b76470bf7f8f2459be07ac5c9621270a2a5e093f8d8a8cc2c/rpds_py-0.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f19169781dddae7478a32301b499b2858bc52fc45a112955e798ee307e294977", size = 430267 }, + { url = "https://files.pythonhosted.org/packages/25/8e/41d7e3e6d3a4a6c94375020477705a3fbb6515717901ab8f94821cf0a0d9/rpds_py-0.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:13c56de6518e14b9bf6edde23c4c39dac5b48dcf04160ea7bce8fca8397cdf86", size = 360569 }, + { url = "https://files.pythonhosted.org/packages/4f/6a/8839340464d4e1bbfaf0482e9d9165a2309c2c17427e4dcb72ce3e5cc5d6/rpds_py-0.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:925d176a549f4832c6f69fa6026071294ab5910e82a0fe6c6228fce17b0706bd", size = 382584 }, + { url = "https://files.pythonhosted.org/packages/64/96/7a7f938d3796a6a3ec08ed0e8a5ecd436fbd516a3684ab1fa22d46d6f6cc/rpds_py-0.20.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:78f0b6877bfce7a3d1ff150391354a410c55d3cdce386f862926a4958ad5ab7e", size = 546560 }, + { url = "https://files.pythonhosted.org/packages/15/c7/19fb4f1247a3c90a99eca62909bf76ee988f9b663e47878a673d9854ec5c/rpds_py-0.20.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3dd645e2b0dcb0fd05bf58e2e54c13875847687d0b71941ad2e757e5d89d4356", size = 549359 }, + { url = "https://files.pythonhosted.org/packages/d2/4c/445eb597a39a883368ea2f341dd6e48a9d9681b12ebf32f38a827b30529b/rpds_py-0.20.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4f676e21db2f8c72ff0936f895271e7a700aa1f8d31b40e4e43442ba94973899", size = 527567 }, + { url = "https://files.pythonhosted.org/packages/4f/71/4c44643bffbcb37311fc7fe221bcf139c8d660bc78f746dd3a05741372c8/rpds_py-0.20.1-cp310-none-win32.whl", hash = "sha256:648386ddd1e19b4a6abab69139b002bc49ebf065b596119f8f37c38e9ecee8ff", size = 200412 }, + { url = "https://files.pythonhosted.org/packages/f4/33/9d0529d74099e090ec9ab15eb0a049c56cca599eaaca71bfedbdbca656a9/rpds_py-0.20.1-cp310-none-win_amd64.whl", hash = "sha256:d9ecb51120de61e4604650666d1f2b68444d46ae18fd492245a08f53ad2b7711", size = 218563 }, + { url = "https://files.pythonhosted.org/packages/a0/2e/a6ded84019a05b8f23e0fe6a632f62ae438a8c5e5932d3dfc90c73418414/rpds_py-0.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:762703bdd2b30983c1d9e62b4c88664df4a8a4d5ec0e9253b0231171f18f6d75", size = 327194 }, + { url = "https://files.pythonhosted.org/packages/68/11/d3f84c69de2b2086be3d6bd5e9d172825c096b13842ab7e5f8f39f06035b/rpds_py-0.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0b581f47257a9fce535c4567782a8976002d6b8afa2c39ff616edf87cbeff712", size = 318126 }, + { url = "https://files.pythonhosted.org/packages/18/c0/13f1bce9c901511e5e4c0b77a99dbb946bb9a177ca88c6b480e9cb53e304/rpds_py-0.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:842c19a6ce894493563c3bd00d81d5100e8e57d70209e84d5491940fdb8b9e3a", size = 361119 }, + { url = "https://files.pythonhosted.org/packages/06/31/3bd721575671f22a37476c2d7b9e34bfa5185bdcee09f7fedde3b29f3adb/rpds_py-0.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42cbde7789f5c0bcd6816cb29808e36c01b960fb5d29f11e052215aa85497c93", size = 369532 }, + { url = "https://files.pythonhosted.org/packages/20/22/3eeb0385f33251b4fd0f728e6a3801dc8acc05e714eb7867cefe635bf4ab/rpds_py-0.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c8e9340ce5a52f95fa7d3b552b35c7e8f3874d74a03a8a69279fd5fca5dc751", size = 403703 }, + { url = "https://files.pythonhosted.org/packages/10/e1/8dde6174e7ac5b9acd3269afca2e17719bc7e5088c68f44874d2ad9e4560/rpds_py-0.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ba6f89cac95c0900d932c9efb7f0fb6ca47f6687feec41abcb1bd5e2bd45535", size = 429868 }, + { url = 
"https://files.pythonhosted.org/packages/19/51/a3cc1a5238acfc2582033e8934d034301f9d4931b9bf7c7ccfabc4ca0880/rpds_py-0.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a916087371afd9648e1962e67403c53f9c49ca47b9680adbeef79da3a7811b0", size = 360539 }, + { url = "https://files.pythonhosted.org/packages/cd/8c/3c87471a44bd4114e2b0aec90f298f6caaac4e8db6af904d5dd2279f5c61/rpds_py-0.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:200a23239781f46149e6a415f1e870c5ef1e712939fe8fa63035cd053ac2638e", size = 382467 }, + { url = "https://files.pythonhosted.org/packages/d0/9b/95073fe3e0f130e6d561e106818b6568ef1f2df3352e7f162ab912da837c/rpds_py-0.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:58b1d5dd591973d426cbb2da5e27ba0339209832b2f3315928c9790e13f159e8", size = 546669 }, + { url = "https://files.pythonhosted.org/packages/de/4c/7ab3669e02bb06fedebcfd64d361b7168ba39dfdf385e4109440f2e7927b/rpds_py-0.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6b73c67850ca7cae0f6c56f71e356d7e9fa25958d3e18a64927c2d930859b8e4", size = 549304 }, + { url = "https://files.pythonhosted.org/packages/f1/e8/ad5da336cd42adbdafe0ecd40dcecdae01fd3d703c621c7637615a008d3a/rpds_py-0.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d8761c3c891cc51e90bc9926d6d2f59b27beaf86c74622c8979380a29cc23ac3", size = 527637 }, + { url = "https://files.pythonhosted.org/packages/02/f1/1b47b9e5b941c2659c9b7e4ef41b6f07385a6500c638fa10c066e4616ecb/rpds_py-0.20.1-cp311-none-win32.whl", hash = "sha256:cd945871335a639275eee904caef90041568ce3b42f402c6959b460d25ae8732", size = 200488 }, + { url = "https://files.pythonhosted.org/packages/85/f6/c751c1adfa31610055acfa1cc667cf2c2d7011a73070679c448cf5856905/rpds_py-0.20.1-cp311-none-win_amd64.whl", hash = "sha256:7e21b7031e17c6b0e445f42ccc77f79a97e2687023c5746bfb7a9e45e0921b84", size = 218475 }, + { url = "https://files.pythonhosted.org/packages/e7/10/4e8dcc08b58a548098dbcee67a4888751a25be7a6dde0a83d4300df48bfa/rpds_py-0.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:36785be22066966a27348444b40389f8444671630063edfb1a2eb04318721e17", size = 329749 }, + { url = "https://files.pythonhosted.org/packages/d2/e4/61144f3790e12fd89e6153d77f7915ad26779735fef8ee9c099cba6dfb4a/rpds_py-0.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:142c0a5124d9bd0e2976089484af5c74f47bd3298f2ed651ef54ea728d2ea42c", size = 321032 }, + { url = "https://files.pythonhosted.org/packages/fa/e0/99205aabbf3be29ef6c58ef9b08feed51ba6532fdd47461245cb58dd9897/rpds_py-0.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbddc10776ca7ebf2a299c41a4dde8ea0d8e3547bfd731cb87af2e8f5bf8962d", size = 363931 }, + { url = "https://files.pythonhosted.org/packages/ac/bd/bce2dddb518b13a7e77eed4be234c9af0c9c6d403d01c5e6ae8eb447ab62/rpds_py-0.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:15a842bb369e00295392e7ce192de9dcbf136954614124a667f9f9f17d6a216f", size = 373343 }, + { url = "https://files.pythonhosted.org/packages/43/15/112b7c553066cb91264691ba7fb119579c440a0ae889da222fa6fc0d411a/rpds_py-0.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be5ef2f1fc586a7372bfc355986226484e06d1dc4f9402539872c8bb99e34b01", size = 406304 }, + { url = "https://files.pythonhosted.org/packages/af/8d/2da52aef8ae5494a382b0c0025ba5b68f2952db0f2a4c7534580e8ca83cc/rpds_py-0.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:dbcf360c9e3399b056a238523146ea77eeb2a596ce263b8814c900263e46031a", size = 423022 }, + { url = "https://files.pythonhosted.org/packages/c8/1b/f23015cb293927c93bdb4b94a48bfe77ad9d57359c75db51f0ff0cf482ff/rpds_py-0.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd27a66740ffd621d20b9a2f2b5ee4129a56e27bfb9458a3bcc2e45794c96cb", size = 364937 }, + { url = "https://files.pythonhosted.org/packages/7b/8b/6da8636b2ea2e2f709e56656e663b6a71ecd9a9f9d9dc21488aade122026/rpds_py-0.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0b937b2a1988f184a3e9e577adaa8aede21ec0b38320d6009e02bd026db04fa", size = 386301 }, + { url = "https://files.pythonhosted.org/packages/20/af/2ae192797bffd0d6d558145b5a36e7245346ff3e44f6ddcb82f0eb8512d4/rpds_py-0.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6889469bfdc1eddf489729b471303739bf04555bb151fe8875931f8564309afc", size = 549452 }, + { url = "https://files.pythonhosted.org/packages/07/dd/9f6520712a5108cd7d407c9db44a3d59011b385c58e320d58ebf67757a9e/rpds_py-0.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:19b73643c802f4eaf13d97f7855d0fb527fbc92ab7013c4ad0e13a6ae0ed23bd", size = 554370 }, + { url = "https://files.pythonhosted.org/packages/5e/0e/b1bdc7ea0db0946d640ab8965146099093391bb5d265832994c47461e3c5/rpds_py-0.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3c6afcf2338e7f374e8edc765c79fbcb4061d02b15dd5f8f314a4af2bdc7feb5", size = 530940 }, + { url = "https://files.pythonhosted.org/packages/ae/d3/ffe907084299484fab60a7955f7c0e8a295c04249090218c59437010f9f4/rpds_py-0.20.1-cp312-none-win32.whl", hash = "sha256:dc73505153798c6f74854aba69cc75953888cf9866465196889c7cdd351e720c", size = 203164 }, + { url = "https://files.pythonhosted.org/packages/1f/ba/9cbb57423c4bfbd81c473913bebaed151ad4158ee2590a4e4b3e70238b48/rpds_py-0.20.1-cp312-none-win_amd64.whl", hash = "sha256:8bbe951244a838a51289ee53a6bae3a07f26d4e179b96fc7ddd3301caf0518eb", size = 220750 }, + { url = "https://files.pythonhosted.org/packages/b5/01/fee2e1d1274c92fff04aa47d805a28d62c2aa971d1f49f5baea1c6e670d9/rpds_py-0.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6ca91093a4a8da4afae7fe6a222c3b53ee4eef433ebfee4d54978a103435159e", size = 329359 }, + { url = "https://files.pythonhosted.org/packages/b0/cf/4aeffb02b7090029d7aeecbffb9a10e1c80f6f56d7e9a30e15481dc4099c/rpds_py-0.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b9c2fe36d1f758b28121bef29ed1dee9b7a2453e997528e7d1ac99b94892527c", size = 320543 }, + { url = "https://files.pythonhosted.org/packages/17/69/85cf3429e9ccda684ba63ff36b5866d5f9451e921cc99819341e19880334/rpds_py-0.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f009c69bc8c53db5dfab72ac760895dc1f2bc1b62ab7408b253c8d1ec52459fc", size = 363107 }, + { url = "https://files.pythonhosted.org/packages/ef/de/7df88dea9c3eeb832196d23b41f0f6fc5f9a2ee9b2080bbb1db8731ead9c/rpds_py-0.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6740a3e8d43a32629bb9b009017ea5b9e713b7210ba48ac8d4cb6d99d86c8ee8", size = 372027 }, + { url = "https://files.pythonhosted.org/packages/d1/b8/88675399d2038580743c570a809c43a900e7090edc6553f8ffb66b23c965/rpds_py-0.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32b922e13d4c0080d03e7b62991ad7f5007d9cd74e239c4b16bc85ae8b70252d", size = 405031 }, + { url = 
"https://files.pythonhosted.org/packages/e1/aa/cca639f6d17caf00bab51bdc70fcc0bdda3063e5662665c4fdf60443c474/rpds_py-0.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe00a9057d100e69b4ae4a094203a708d65b0f345ed546fdef86498bf5390982", size = 422271 }, + { url = "https://files.pythonhosted.org/packages/c4/07/bf8a949d2ec4626c285579c9d6b356c692325f1a4126e947736b416e1fc4/rpds_py-0.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fe9b04b6fa685bd39237d45fad89ba19e9163a1ccaa16611a812e682913496", size = 363625 }, + { url = "https://files.pythonhosted.org/packages/11/f0/06675c6a58d6ce34547879138810eb9aab0c10e5607ea6c2e4dc56b703c8/rpds_py-0.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aa7ac11e294304e615b43f8c441fee5d40094275ed7311f3420d805fde9b07b4", size = 385906 }, + { url = "https://files.pythonhosted.org/packages/bf/ac/2d1f50374eb8e41030fad4e87f81751e1c39e3b5d4bee8c5618830d8a6ac/rpds_py-0.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aa97af1558a9bef4025f8f5d8c60d712e0a3b13a2fe875511defc6ee77a1ab7", size = 549021 }, + { url = "https://files.pythonhosted.org/packages/f7/d4/a7d70a7cc71df772eeadf4bce05e32e780a9fe44a511a5b091c7a85cb767/rpds_py-0.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:483b29f6f7ffa6af845107d4efe2e3fa8fb2693de8657bc1849f674296ff6a5a", size = 553800 }, + { url = "https://files.pythonhosted.org/packages/87/81/dc30bc449ccba63ad23a0f6633486d4e0e6955f45f3715a130dacabd6ad0/rpds_py-0.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:37fe0f12aebb6a0e3e17bb4cd356b1286d2d18d2e93b2d39fe647138458b4bcb", size = 531076 }, + { url = "https://files.pythonhosted.org/packages/50/80/fb62ab48f3b5cfe704ead6ad372da1922ddaa76397055e02eb507054c979/rpds_py-0.20.1-cp313-none-win32.whl", hash = "sha256:a624cc00ef2158e04188df5e3016385b9353638139a06fb77057b3498f794782", size = 202804 }, + { url = "https://files.pythonhosted.org/packages/d9/30/a3391e76d0b3313f33bdedd394a519decae3a953d2943e3dabf80ae32447/rpds_py-0.20.1-cp313-none-win_amd64.whl", hash = "sha256:b71b8666eeea69d6363248822078c075bac6ed135faa9216aa85f295ff009b1e", size = 220502 }, + { url = "https://files.pythonhosted.org/packages/d6/87/e7e0fcbfdc0d0e261534bcc885f6ae6253095b972e32f8b8b1278c78a2a9/rpds_py-0.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b41b6321805c472f66990c2849e152aff7bc359eb92f781e3f606609eac877ad", size = 327867 }, + { url = "https://files.pythonhosted.org/packages/93/a0/17836b7961fc82586e9b818abdee2a27e2e605a602bb8c0d43f02092f8c2/rpds_py-0.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a90c373ea2975519b58dece25853dbcb9779b05cc46b4819cb1917e3b3215b6", size = 318893 }, + { url = "https://files.pythonhosted.org/packages/dc/03/deb81d8ea3a8b974e7b03cfe8c8c26616ef8f4980dd430d8dd0a2f1b4d8e/rpds_py-0.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16d4477bcb9fbbd7b5b0e4a5d9b493e42026c0bf1f06f723a9353f5153e75d30", size = 361664 }, + { url = "https://files.pythonhosted.org/packages/16/49/d9938603731745c7b6babff97ca61ff3eb4619e7128b5ab0111ad4e91d6d/rpds_py-0.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:84b8382a90539910b53a6307f7c35697bc7e6ffb25d9c1d4e998a13e842a5e83", size = 369796 }, + { url = "https://files.pythonhosted.org/packages/87/d2/480b36c69cdc373853401b6aab6a281cf60f6d72b1545d82c0d23d9dd77c/rpds_py-0.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:4888e117dd41b9d34194d9e31631af70d3d526efc363085e3089ab1a62c32ed1", size = 403860 }, + { url = "https://files.pythonhosted.org/packages/31/7c/f6d909cb57761293308dbef14f1663d84376f2e231892a10aafc57b42037/rpds_py-0.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5265505b3d61a0f56618c9b941dc54dc334dc6e660f1592d112cd103d914a6db", size = 430793 }, + { url = "https://files.pythonhosted.org/packages/d4/62/c9bd294c4b5f84d9cc2c387b548ae53096ad7e71ac5b02b6310e9dc85aa4/rpds_py-0.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e75ba609dba23f2c95b776efb9dd3f0b78a76a151e96f96cc5b6b1b0004de66f", size = 360927 }, + { url = "https://files.pythonhosted.org/packages/c1/a7/15d927d83a44da8307a432b1cac06284b6657706d099a98cc99fec34ad51/rpds_py-0.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1791ff70bc975b098fe6ecf04356a10e9e2bd7dc21fa7351c1742fdeb9b4966f", size = 382660 }, + { url = "https://files.pythonhosted.org/packages/4c/28/0630719c18456238bb07d59c4302fed50a13aa8035ec23dbfa80d116f9bc/rpds_py-0.20.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d126b52e4a473d40232ec2052a8b232270ed1f8c9571aaf33f73a14cc298c24f", size = 546888 }, + { url = "https://files.pythonhosted.org/packages/b9/75/3c9bda11b9c15d680b315f898af23825159314d4b56568f24b53ace8afcd/rpds_py-0.20.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c14937af98c4cc362a1d4374806204dd51b1e12dded1ae30645c298e5a5c4cb1", size = 550088 }, + { url = "https://files.pythonhosted.org/packages/70/f1/8fe7d04c194218171220a412057429defa9e2da785de0777c4d39309337e/rpds_py-0.20.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3d089d0b88996df627693639d123c8158cff41c0651f646cd8fd292c7da90eaf", size = 528270 }, + { url = "https://files.pythonhosted.org/packages/d6/62/41b0020f4b00af042b008e679dbe25a2f5bce655139a81f8b812f9068e52/rpds_py-0.20.1-cp39-none-win32.whl", hash = "sha256:653647b8838cf83b2e7e6a0364f49af96deec64d2a6578324db58380cff82aca", size = 200658 }, + { url = "https://files.pythonhosted.org/packages/05/01/e64bb8889f2dcc951e53de33d8b8070456397ae4e10edc35e6cb9908f5c8/rpds_py-0.20.1-cp39-none-win_amd64.whl", hash = "sha256:fa41a64ac5b08b292906e248549ab48b69c5428f3987b09689ab2441f267d04d", size = 218883 }, + { url = "https://files.pythonhosted.org/packages/b6/fa/7959429e69569d0f6e7d27f80451402da0409349dd2b07f6bcbdd5fad2d3/rpds_py-0.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a07ced2b22f0cf0b55a6a510078174c31b6d8544f3bc00c2bcee52b3d613f74", size = 328209 }, + { url = "https://files.pythonhosted.org/packages/25/97/5dfdb091c30267ff404d2fd9e70c7a6d6ffc65ca77fffe9456e13b719066/rpds_py-0.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:68cb0a499f2c4a088fd2f521453e22ed3527154136a855c62e148b7883b99f9a", size = 319499 }, + { url = "https://files.pythonhosted.org/packages/7c/98/cf2608722400f5f9bb4c82aa5ac09026f3ac2ebea9d4059d3533589ed0b6/rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa3060d885657abc549b2a0f8e1b79699290e5d83845141717c6c90c2df38311", size = 361795 }, + { url = "https://files.pythonhosted.org/packages/89/de/0e13dd43c785c60e63933e96fbddda0b019df6862f4d3019bb49c3861131/rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95f3b65d2392e1c5cec27cff08fdc0080270d5a1a4b2ea1d51d5f4a2620ff08d", size = 370604 }, + { url = 
"https://files.pythonhosted.org/packages/8a/fc/fe3c83c77f82b8059eeec4e998064913d66212b69b3653df48f58ad33d3d/rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2cc3712a4b0b76a1d45a9302dd2f53ff339614b1c29603a911318f2357b04dd2", size = 404177 }, + { url = "https://files.pythonhosted.org/packages/94/30/5189518bfb80a41f664daf32b46645c7fbdcc89028a0f1bfa82e806e0fbb/rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d4eea0761e37485c9b81400437adb11c40e13ef513375bbd6973e34100aeb06", size = 430108 }, + { url = "https://files.pythonhosted.org/packages/67/0e/6f069feaff5c298375cd8c55e00ecd9bd79c792ce0893d39448dc0097857/rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f5179583d7a6cdb981151dd349786cbc318bab54963a192692d945dd3f6435d", size = 361184 }, + { url = "https://files.pythonhosted.org/packages/27/9f/ce3e2ae36f392c3ef1988c06e9e0b4c74f64267dad7c223003c34da11adb/rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fbb0ffc754490aff6dabbf28064be47f0f9ca0b9755976f945214965b3ace7e", size = 384140 }, + { url = "https://files.pythonhosted.org/packages/5f/d5/89d44504d0bc7a1135062cb520a17903ff002f458371b8d9160af3b71e52/rpds_py-0.20.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a94e52537a0e0a85429eda9e49f272ada715506d3b2431f64b8a3e34eb5f3e75", size = 546589 }, + { url = "https://files.pythonhosted.org/packages/8f/8f/e1c2db4fcca3947d9a28ec9553700b4dc8038f0eff575f579e75885b0661/rpds_py-0.20.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:92b68b79c0da2a980b1c4197e56ac3dd0c8a149b4603747c4378914a68706979", size = 550059 }, + { url = "https://files.pythonhosted.org/packages/67/29/00a9e986df36721b5def82fff60995c1ee8827a7d909a6ec8929fb4cc668/rpds_py-0.20.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:93da1d3db08a827eda74356f9f58884adb254e59b6664f64cc04cdff2cc19b0d", size = 529131 }, + { url = "https://files.pythonhosted.org/packages/a3/32/95364440560ec476b19c6a2704259e710c223bf767632ebaa72cc2a1760f/rpds_py-0.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:754bbed1a4ca48479e9d4182a561d001bbf81543876cdded6f695ec3d465846b", size = 219677 }, + { url = "https://files.pythonhosted.org/packages/ed/bf/ad8492e972c90a3d48a38e2b5095c51a8399d5b57e83f2d5d1649490f72b/rpds_py-0.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ca449520e7484534a2a44faf629362cae62b660601432d04c482283c47eaebab", size = 328046 }, + { url = "https://files.pythonhosted.org/packages/75/fd/84f42386165d6d555acb76c6d39c90b10c9dcf25116daf4f48a0a9d6867a/rpds_py-0.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9c4cb04a16b0f199a8c9bf807269b2f63b7b5b11425e4a6bd44bd6961d28282c", size = 319306 }, + { url = "https://files.pythonhosted.org/packages/6c/8a/abcd5119a0573f9588ad71a3fde3c07ddd1d1393cfee15a6ba7495c256f1/rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb63804105143c7e24cee7db89e37cb3f3941f8e80c4379a0b355c52a52b6780", size = 362558 }, + { url = "https://files.pythonhosted.org/packages/9d/65/1c2bb10afd4bd32800227a658ae9097bc1d08a4e5048a57a9bd2efdf6306/rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:55cd1fa4ecfa6d9f14fbd97ac24803e6f73e897c738f771a9fe038f2f11ff07c", size = 370811 }, + { url = 
"https://files.pythonhosted.org/packages/6c/ee/f4bab2b9e51ced30351cfd210647885391463ae682028c79760e7db28e4e/rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f8f741b6292c86059ed175d80eefa80997125b7c478fb8769fd9ac8943a16c0", size = 404660 }, + { url = "https://files.pythonhosted.org/packages/48/0f/9d04d0939682f0c97be827fc51ff986555ffb573e6781bd5132441f0ce25/rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fc212779bf8411667234b3cdd34d53de6c2b8b8b958e1e12cb473a5f367c338", size = 430490 }, + { url = "https://files.pythonhosted.org/packages/0d/f2/e9b90fd8416d59941b6a12f2c2e1d898b63fd092f5a7a6f98236cb865764/rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ad56edabcdb428c2e33bbf24f255fe2b43253b7d13a2cdbf05de955217313e6", size = 361448 }, + { url = "https://files.pythonhosted.org/packages/0b/83/1cc776dce7bedb17d6f4ea62eafccee8a57a4678f4fac414ab69fb9b6b0b/rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a3a1e9ee9728b2c1734f65d6a1d376c6f2f6fdcc13bb007a08cc4b1ff576dc5", size = 383681 }, + { url = "https://files.pythonhosted.org/packages/17/5c/e0cdd6b0a8373fdef3667af2778dd9ff3abf1bbb9c7bd92c603c91440eb0/rpds_py-0.20.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e13de156137b7095442b288e72f33503a469aa1980ed856b43c353ac86390519", size = 546203 }, + { url = "https://files.pythonhosted.org/packages/1b/a8/81fc9cbc01e7ef6d10652aedc1de4e8473934773e2808ba49094e03575df/rpds_py-0.20.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:07f59760ef99f31422c49038964b31c4dfcfeb5d2384ebfc71058a7c9adae2d2", size = 549855 }, + { url = "https://files.pythonhosted.org/packages/b3/87/99648693d3c1bbce088119bc61ecaab62e5f9c713894edc604ffeca5ae88/rpds_py-0.20.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:59240685e7da61fb78f65a9f07f8108e36a83317c53f7b276b4175dc44151684", size = 528625 }, + { url = "https://files.pythonhosted.org/packages/05/c3/10c68a08849f1fa45d205e54141fa75d316013e3d701ef01770ee1220bb8/rpds_py-0.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:83cba698cfb3c2c5a7c3c6bac12fe6c6a51aae69513726be6411076185a8b24a", size = 219991 }, +] + +[[package]] +name = "rsa" +version = "4.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/aa/65/7d973b89c4d2351d7fb232c2e452547ddfa243e93131e7cfa766da627b52/rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21", size = 29711 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/97/fa78e3d2f65c02c8e1268b9aba606569fe97f6c8f7c2d74394553347c145/rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7", size = 34315 }, +] + +[[package]] +name = "secretstorage" +version = "3.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "jeepney" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/53/a4/f48c9d79cb507ed1373477dbceaba7401fd8a23af63b837fa61f1dcd3691/SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77", size = 19739 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/24/b4293291fa1dd830f353d2cb163295742fa87f179fcc8a20a306a81978b7/SecretStorage-3.3.3-py3-none-any.whl", hash = 
"sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99", size = 15221 }, +] + +[[package]] +name = "semver" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/41/6c/a536cc008f38fd83b3c1b98ce19ead13b746b5588c9a0cb9dd9f6ea434bc/semver-3.0.2.tar.gz", hash = "sha256:6253adb39c70f6e51afed2fa7152bcd414c411286088fb4b9effb133885ab4cc", size = 214988 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/77/0cc7a8a3bc7e53d07e8f47f147b92b0960e902b8254859f4aee5c4d7866b/semver-3.0.2-py3-none-any.whl", hash = "sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4", size = 17099 }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 }, +] + +[[package]] +name = "smmap" +version = "5.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/88/04/b5bf6d21dc4041000ccba7eb17dd3055feb237e7ffc2c20d3fae3af62baa/smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62", size = 22291 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/a5/10f97f73544edcdef54409f1d839f6049a0d79df68adbc1ceb24d1aaca42/smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da", size = 24282 }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, +] + +[[package]] +name = "tabulate" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252 }, +] + +[[package]] +name = "tomli" +version = "2.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/35/b9/de2a5c0144d7d75a57ff355c0c24054f965b2dc3036456ae03a51ea6264b/tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed", size = 16096 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/cf/db/ce8eda256fa131af12e0a76d481711abe4681b6923c27efb9a255c9e4594/tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38", size = 13237 }, +] + +[[package]] +name = "tomli-w" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/19/b65f1a088ee23e37cdea415b357843eca8b1422a7b11a9eee6e35d4ec273/tomli_w-1.1.0.tar.gz", hash = "sha256:49e847a3a304d516a169a601184932ef0f6b61623fe680f836a2aa7128ed0d33", size = 6929 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/ac/ce90573ba446a9bbe65838ded066a805234d159b4446ae9f8ec5bbd36cbd/tomli_w-1.1.0-py3-none-any.whl", hash = "sha256:1403179c78193e3184bfaade390ddbd071cba48a32a2e62ba11aae47490c63f7", size = 6440 }, +] + +[[package]] +name = "tomlkit" +version = "0.13.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/09/a439bec5888f00a54b8b9f05fa94d7f901d6735ef4e55dcec9bc37b5d8fa/tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79", size = 192885 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/b6/a447b5e4ec71e13871be01ba81f5dfc9d0af7e473da256ff46bc0e24026f/tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde", size = 37955 }, +] + +[[package]] +name = "trove-classifiers" +version = "2024.10.21.16" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/99/85/92c2667cf221b37648041ce9319427f92fa76cbec634aad844e67e284706/trove_classifiers-2024.10.21.16.tar.gz", hash = "sha256:17cbd055d67d5e9d9de63293a8732943fabc21574e4c7b74edf112b4928cf5f3", size = 16153 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/35/5055ab8d215af853d07bbff1a74edf48f91ed308f037380a5ca52dd73348/trove_classifiers-2024.10.21.16-py3-none-any.whl", hash = "sha256:0fb11f1e995a757807a8ef1c03829fbd4998d817319abcef1f33165750f103be", size = 13546 }, +] + +[[package]] +name = "twine" +version = "5.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "keyring" }, + { name = "pkginfo" }, + { name = "readme-renderer" }, + { name = "requests" }, + { name = "requests-toolbelt" }, + { name = "rfc3986" }, + { name = "rich" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/77/68/bd982e5e949ef8334e6f7dcf76ae40922a8750aa2e347291ae1477a4782b/twine-5.1.1.tar.gz", hash = "sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db", size = 225531 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/ec/00f9d5fd040ae29867355e559a94e9a8429225a0284a3f5f091a3878bfc0/twine-5.1.1-py3-none-any.whl", hash = "sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997", size = 38650 }, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = 
"sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, +] + +[[package]] +name = "uritemplate" +version = "4.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d2/5a/4742fdba39cd02a56226815abfa72fe0aa81c33bed16ed045647d6000eba/uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0", size = 273898 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/c0/7461b49cd25aeece13766f02ee576d1db528f1c37ce69aee300e075b485b/uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e", size = 10356 }, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/63/22ba4ebfe7430b76388e7cd448d5478814d3032121827c12a2cc287e2260/urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9", size = 300677 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/d9/5f4c13cecde62396b0d3fe530a50ccea91e7dfc1ccf0e09c228841bb5ba8/urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", size = 126338 }, +] + +[[package]] +name = "userpath" +version = "1.9.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d5/b7/30753098208505d7ff9be5b3a32112fb8a4cb3ddfccbbb7ba9973f2e29ff/userpath-1.9.2.tar.gz", hash = "sha256:6c52288dab069257cc831846d15d48133522455d4677ee69a9781f11dbefd815", size = 11140 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/99/3ec6335ded5b88c2f7ed25c56ffd952546f7ed007ffb1e1539dc3b57015a/userpath-1.9.2-py3-none-any.whl", hash = "sha256:2cbf01a23d655a1ff8fc166dfb78da1b641d1ceabf0fe5f970767d380b14e89d", size = 9065 }, +] + +[[package]] +name = "uv" +version = "0.4.29" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/23/6e8d8177112b40d4905a49c03d397c5b93eb030f87cdddf0c5d4be599fc9/uv-0.4.29.tar.gz", hash = "sha256:9c559b6fdc042add463e86afa1c210716f7020bfc2e96b00df5af7afcb587ce7", size = 2102901 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1c/8d/78b6927a3e511a4bc05347714c8917896477537bf09a6301e84de08b7a59/uv-0.4.29-py3-none-linux_armv6l.whl", hash = "sha256:287dc3fd3f78093a5a82136f01cbd9f224e0905b38d3dcffdc96c08fbbe48ee9", size = 13250618 }, + { url = "https://files.pythonhosted.org/packages/d8/2f/1bbfc3c15933fcf07c222e063044696320f5a9fe3d5c584960ed0c490cf8/uv-0.4.29-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6b03859068aaa08ca9907a51d403d54b0a9d8054091646845a9192f213f099d4", size = 13316211 }, + { url = "https://files.pythonhosted.org/packages/fb/1a/1c862cc36f29cf58b22758f31eb5f9611ee86429d470c8e4c0fd235592ec/uv-0.4.29-py3-none-macosx_11_0_arm64.whl", hash = "sha256:950bbfe1954e9c3a5d6c4777bb778b4c23d0dea9ad9f77622c45d4fbba433355", size = 12363705 }, + { url = "https://files.pythonhosted.org/packages/a1/0e/76e947db1135fa2436b11cc1ca927de187601be7ec65b0102f42a6a58211/uv-0.4.29-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:3473b05142ba436ac30d036b7ab5e9bcfa97f63df5d1382f92e0a3e4aaa391bc", size = 12622825 }, + { url = 
"https://files.pythonhosted.org/packages/41/3d/b54226b11eb935e4e57585905cf3ded2ac7d972c551bef1c3a000d4c5e47/uv-0.4.29-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7060dfbad0bc26e9cecbb4f8482445c958071511f23728948478f81acfb29048", size = 13054445 }, + { url = "https://files.pythonhosted.org/packages/bf/00/02fa712a3991957d2a65d043173d06d3a429acb3c4e54976f4385c034d97/uv-0.4.29-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df35d9cbe4cfbb7bce287f56e3bb7a7cef0b7b5173ed889d936d4c470f2b1b83", size = 13655646 }, + { url = "https://files.pythonhosted.org/packages/61/85/f6796032396bbd350648747c984376c8c8add14c75476ed8d5a3438a9c76/uv-0.4.29-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:cfb797a87b55d96cc0593e9f29ab5d58454be74598ea0158e1b2f4f2dc97cede", size = 14281147 }, + { url = "https://files.pythonhosted.org/packages/17/48/3314a03c6580d0b05bd1b9122ff9a9fbde5163815cf84f5a220fc013cea1/uv-0.4.29-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:668d3e6095c6f0cac6a831ef4030f7ad79442d1c84b9569f01f50b60c2d51a77", size = 14004714 }, + { url = "https://files.pythonhosted.org/packages/11/e0/456bc5271f09ff385c57570628705757a59f9a3f8205ff029dc9b2213dbd/uv-0.4.29-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0be21afa0e582ddc5badff6ef40c3c6784efc5feae4ad568307b668d40dc49bd", size = 18032241 }, + { url = "https://files.pythonhosted.org/packages/ef/6c/db10ff7f178ee93a832941e1cddbf38bfb1b0e30fd07580db10eb909f19d/uv-0.4.29-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6224a322267570e0470c61008fd1c8e2f50bf073b339f4c3010da86aef3c44c", size = 13787528 }, + { url = "https://files.pythonhosted.org/packages/1b/cf/501cd6aeeae0413e83ed0c112a362e44c05fa01144ecfd05c6fb3533778d/uv-0.4.29-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:24cccff9c248864ba0ab3429bae56314146c9494ce66a881d70ea8cf2805945f", size = 12789635 }, + { url = "https://files.pythonhosted.org/packages/8d/8d/3103af713c6369b6c1afe2bd8415eb43ea2cd4d11aa823f2e5747736b410/uv-0.4.29-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:68d4967b5f0af8bd46085e0f3ded229026700668a97734a21c3d11a5fc350c47", size = 13022589 }, + { url = "https://files.pythonhosted.org/packages/4f/4d/e9a0da7c43301f27503ed0af881afb9059e3700bd374d1c7c6579ff9fb29/uv-0.4.29-py3-none-musllinux_1_1_i686.whl", hash = "sha256:75927da78f74bb935314d236dc61ecdc192e878e06eb79585b6d9d5ee9829f98", size = 13367805 }, + { url = "https://files.pythonhosted.org/packages/be/70/a78cd7cdac7581cf0a7e027cf3c69d07ca5b6b83d39f571411cc73f1590f/uv-0.4.29-py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:246da468ac0d51e7fb257cd038db2f8d6376ae269a44d01f56776e32108aa9da", size = 15158094 }, + { url = "https://files.pythonhosted.org/packages/e6/93/3bcb18a54a9823c8bfadd362022b1c480da10c0bcd86398101f9a124e0a7/uv-0.4.29-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:8c71663c7df4f512c697de39a4926dc191897f5fede73644bb2329f532c1ebfa", size = 13917229 }, + { url = "https://files.pythonhosted.org/packages/8a/38/bd90e265f868ddbc2dd3cc9625e2d8670d3ac35984a078491be11be754f3/uv-0.4.29-py3-none-win32.whl", hash = "sha256:b5775db128b98251c3ea7874367fc20dce9f9aac3dbfa635e3ef4a1c56842d9c", size = 13203439 }, + { url = "https://files.pythonhosted.org/packages/cb/4f/446a0fe5901b110093f3888e93c8ebee1b08f35ba1699bbaf3645b553865/uv-0.4.29-py3-none-win_amd64.whl", hash = "sha256:67dcfd253020e25ed1c49e5bd06406205c37264f99e14002de53a357cd1cdadf", size = 14902665 }, +] + +[[package]] 
+name = "virtualenv" +version = "20.27.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib" }, + { name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8c/b3/7b6a79c5c8cf6d90ea681310e169cf2db2884f4d583d16c6e1d5a75a4e04/virtualenv-20.27.1.tar.gz", hash = "sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba", size = 6491145 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/92/78324ff89391e00c8f4cf6b8526c41c6ef36b4ea2d2c132250b1a6fc2b8d/virtualenv-20.27.1-py3-none-any.whl", hash = "sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4", size = 3117838 }, +] + +[[package]] +name = "wrapt" +version = "1.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/4c/063a912e20bcef7124e0df97282a8af3ff3e4b603ce84c481d6d7346be0a/wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d", size = 53972 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/c6/5375258add3777494671d8cec27cdf5402abd91016dee24aa2972c61fedf/wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4", size = 37315 }, + { url = "https://files.pythonhosted.org/packages/32/12/e11adfde33444986135d8881b401e4de6cbb4cced046edc6b464e6ad7547/wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020", size = 38160 }, + { url = "https://files.pythonhosted.org/packages/70/7d/3dcc4a7e96f8d3e398450ec7703db384413f79bd6c0196e0e139055ce00f/wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440", size = 80419 }, + { url = "https://files.pythonhosted.org/packages/d1/c4/8dfdc3c2f0b38be85c8d9fdf0011ebad2f54e40897f9549a356bebb63a97/wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487", size = 72669 }, + { url = "https://files.pythonhosted.org/packages/49/83/b40bc1ad04a868b5b5bcec86349f06c1ee1ea7afe51dc3e46131e4f39308/wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf", size = 80271 }, + { url = "https://files.pythonhosted.org/packages/19/d4/cd33d3a82df73a064c9b6401d14f346e1d2fb372885f0295516ec08ed2ee/wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72", size = 84748 }, + { url = "https://files.pythonhosted.org/packages/ef/58/2fde309415b5fa98fd8f5f4a11886cbf276824c4c64d45a39da342fff6fe/wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0", size = 77522 }, + { url = "https://files.pythonhosted.org/packages/07/44/359e4724a92369b88dbf09878a7cde7393cf3da885567ea898e5904049a3/wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136", size = 84780 }, + { url = "https://files.pythonhosted.org/packages/88/8f/706f2fee019360cc1da652353330350c76aa5746b4e191082e45d6838faf/wrapt-1.16.0-cp310-cp310-win32.whl", hash = 
"sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d", size = 35335 }, + { url = "https://files.pythonhosted.org/packages/19/2b/548d23362e3002ebbfaefe649b833fa43f6ca37ac3e95472130c4b69e0b4/wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2", size = 37528 }, + { url = "https://files.pythonhosted.org/packages/fd/03/c188ac517f402775b90d6f312955a5e53b866c964b32119f2ed76315697e/wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09", size = 37313 }, + { url = "https://files.pythonhosted.org/packages/0f/16/ea627d7817394db04518f62934a5de59874b587b792300991b3c347ff5e0/wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d", size = 38164 }, + { url = "https://files.pythonhosted.org/packages/7f/a7/f1212ba098f3de0fd244e2de0f8791ad2539c03bef6c05a9fcb03e45b089/wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389", size = 80890 }, + { url = "https://files.pythonhosted.org/packages/b7/96/bb5e08b3d6db003c9ab219c487714c13a237ee7dcc572a555eaf1ce7dc82/wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060", size = 73118 }, + { url = "https://files.pythonhosted.org/packages/6e/52/2da48b35193e39ac53cfb141467d9f259851522d0e8c87153f0ba4205fb1/wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1", size = 80746 }, + { url = "https://files.pythonhosted.org/packages/11/fb/18ec40265ab81c0e82a934de04596b6ce972c27ba2592c8b53d5585e6bcd/wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3", size = 85668 }, + { url = "https://files.pythonhosted.org/packages/0f/ef/0ecb1fa23145560431b970418dce575cfaec555ab08617d82eb92afc7ccf/wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956", size = 78556 }, + { url = "https://files.pythonhosted.org/packages/25/62/cd284b2b747f175b5a96cbd8092b32e7369edab0644c45784871528eb852/wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d", size = 85712 }, + { url = "https://files.pythonhosted.org/packages/e5/a7/47b7ff74fbadf81b696872d5ba504966591a3468f1bc86bca2f407baef68/wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362", size = 35327 }, + { url = "https://files.pythonhosted.org/packages/cf/c3/0084351951d9579ae83a3d9e38c140371e4c6b038136909235079f2e6e78/wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89", size = 37523 }, + { url = "https://files.pythonhosted.org/packages/92/17/224132494c1e23521868cdd57cd1e903f3b6a7ba6996b7b8f077ff8ac7fe/wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b", size = 37614 }, + { url = 
"https://files.pythonhosted.org/packages/6a/d7/cfcd73e8f4858079ac59d9db1ec5a1349bc486ae8e9ba55698cc1f4a1dff/wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36", size = 38316 }, + { url = "https://files.pythonhosted.org/packages/7e/79/5ff0a5c54bda5aec75b36453d06be4f83d5cd4932cc84b7cb2b52cee23e2/wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73", size = 86322 }, + { url = "https://files.pythonhosted.org/packages/c4/81/e799bf5d419f422d8712108837c1d9bf6ebe3cb2a81ad94413449543a923/wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809", size = 79055 }, + { url = "https://files.pythonhosted.org/packages/62/62/30ca2405de6a20448ee557ab2cd61ab9c5900be7cbd18a2639db595f0b98/wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b", size = 87291 }, + { url = "https://files.pythonhosted.org/packages/49/4e/5d2f6d7b57fc9956bf06e944eb00463551f7d52fc73ca35cfc4c2cdb7aed/wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81", size = 90374 }, + { url = "https://files.pythonhosted.org/packages/a6/9b/c2c21b44ff5b9bf14a83252a8b973fb84923764ff63db3e6dfc3895cf2e0/wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9", size = 83896 }, + { url = "https://files.pythonhosted.org/packages/14/26/93a9fa02c6f257df54d7570dfe8011995138118d11939a4ecd82cb849613/wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c", size = 91738 }, + { url = "https://files.pythonhosted.org/packages/a2/5b/4660897233eb2c8c4de3dc7cefed114c61bacb3c28327e64150dc44ee2f6/wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc", size = 35568 }, + { url = "https://files.pythonhosted.org/packages/5c/cc/8297f9658506b224aa4bd71906447dea6bb0ba629861a758c28f67428b91/wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8", size = 37653 }, + { url = "https://files.pythonhosted.org/packages/70/cc/b92e1da2cad6a9f8ee481000ece07a35e3b24e041e60ff8b850c079f0ebf/wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2", size = 37314 }, + { url = "https://files.pythonhosted.org/packages/4a/cc/3402bcc897978be00fef608cd9e3e39ec8869c973feeb5e1e277670e5ad2/wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb", size = 38162 }, + { url = "https://files.pythonhosted.org/packages/28/d3/4f079f649c515727c127c987b2ec2e0816b80d95784f2d28d1a57d2a1029/wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8", size = 80235 }, + { url = "https://files.pythonhosted.org/packages/a3/1c/226c2a4932e578a2241dcb383f425995f80224b446f439c2e112eb51c3a6/wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c", size = 72553 }, + { url = "https://files.pythonhosted.org/packages/b1/e7/459a8a4f40f2fa65eb73cb3f339e6d152957932516d18d0e996c7ae2d7ae/wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a", size = 80129 }, + { url = "https://files.pythonhosted.org/packages/da/6f/6d0b3c4983f1fc764a422989dabc268ee87d937763246cd48aa92f1eed1e/wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664", size = 84550 }, + { url = "https://files.pythonhosted.org/packages/96/e8/27ef35cf61e5147c1c3abcb89cfbb8d691b2bb8364803fcc950140bc14d8/wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f", size = 77352 }, + { url = "https://files.pythonhosted.org/packages/b6/ad/7a0766341081bfd9f18a7049e4d6d45586ae5c5bb0a640f05e2f558e849c/wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537", size = 84626 }, + { url = "https://files.pythonhosted.org/packages/09/43/b26852e9c45a1aac0d14b1080b25b612fa840ba99739c5fc55db07b7ce08/wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3", size = 35327 }, + { url = "https://files.pythonhosted.org/packages/74/f2/96ed140b08743f7f68d5bda35a2a589600781366c3da96f056043d258b1a/wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35", size = 37526 }, + { url = "https://files.pythonhosted.org/packages/ff/21/abdedb4cdf6ff41ebf01a74087740a709e2edb146490e4d9beea054b0b7a/wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1", size = 23362 }, +] + +[[package]] +name = "zipp" +version = "3.20.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/bf/5c0000c44ebc80123ecbdddba1f5dcd94a5ada602a9c225d84b5aaa55e86/zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29", size = 24199 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/8b/5ba542fa83c90e09eac972fc9baca7a88e7e7ca4b221a89251954019308b/zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350", size = 9200 }, +] + +[[package]] +name = "zstandard" +version = "0.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation == 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/f6/2ac0287b442160a89d726b17a9184a4c615bb5237db763791a7fd16d9df1/zstandard-0.23.0.tar.gz", hash = "sha256:b2d8c62d08e7255f68f7a740bae85b3c9b8e5466baa9cbf7f57f1cde0ac6bc09", size = 681701 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/55/bd0487e86679db1823fc9ee0d8c9c78ae2413d34c0b461193b5f4c31d22f/zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9", size = 788701 }, + { url = "https://files.pythonhosted.org/packages/e1/8a/ccb516b684f3ad987dfee27570d635822e3038645b1a950c5e8022df1145/zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880", size = 633678 }, + { url = "https://files.pythonhosted.org/packages/12/89/75e633d0611c028e0d9af6df199423bf43f54bea5007e6718ab7132e234c/zstandard-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77da4c6bfa20dd5ea25cbf12c76f181a8e8cd7ea231c673828d0386b1740b8dc", size = 4941098 }, + { url = "https://files.pythonhosted.org/packages/4a/7a/bd7f6a21802de358b63f1ee636ab823711c25ce043a3e9f043b4fcb5ba32/zstandard-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2170c7e0367dde86a2647ed5b6f57394ea7f53545746104c6b09fc1f4223573", size = 5308798 }, + { url = "https://files.pythonhosted.org/packages/79/3b/775f851a4a65013e88ca559c8ae42ac1352db6fcd96b028d0df4d7d1d7b4/zstandard-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c16842b846a8d2a145223f520b7e18b57c8f476924bda92aeee3a88d11cfc391", size = 5341840 }, + { url = "https://files.pythonhosted.org/packages/09/4f/0cc49570141dd72d4d95dd6fcf09328d1b702c47a6ec12fbed3b8aed18a5/zstandard-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:157e89ceb4054029a289fb504c98c6a9fe8010f1680de0201b3eb5dc20aa6d9e", size = 5440337 }, + { url = "https://files.pythonhosted.org/packages/e7/7c/aaa7cd27148bae2dc095191529c0570d16058c54c4597a7d118de4b21676/zstandard-0.23.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:203d236f4c94cd8379d1ea61db2fce20730b4c38d7f1c34506a31b34edc87bdd", size = 4861182 }, + { url = "https://files.pythonhosted.org/packages/ac/eb/4b58b5c071d177f7dc027129d20bd2a44161faca6592a67f8fcb0b88b3ae/zstandard-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dc5d1a49d3f8262be192589a4b72f0d03b72dcf46c51ad5852a4fdc67be7b9e4", size = 4932936 }, + { url = "https://files.pythonhosted.org/packages/44/f9/21a5fb9bb7c9a274b05ad700a82ad22ce82f7ef0f485980a1e98ed6e8c5f/zstandard-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:752bf8a74412b9892f4e5b58f2f890a039f57037f52c89a740757ebd807f33ea", size = 5464705 }, + { url = "https://files.pythonhosted.org/packages/49/74/b7b3e61db3f88632776b78b1db597af3f44c91ce17d533e14a25ce6a2816/zstandard-0.23.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80080816b4f52a9d886e67f1f96912891074903238fe54f2de8b786f86baded2", size = 4857882 }, + { url = "https://files.pythonhosted.org/packages/4a/7f/d8eb1cb123d8e4c541d4465167080bec88481ab54cd0b31eb4013ba04b95/zstandard-0.23.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:84433dddea68571a6d6bd4fbf8ff398236031149116a7fff6f777ff95cad3df9", size = 4697672 }, + { url = "https://files.pythonhosted.org/packages/5e/05/f7dccdf3d121309b60342da454d3e706453a31073e2c4dac8e1581861e44/zstandard-0.23.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab19a2d91963ed9e42b4e8d77cd847ae8381576585bad79dbd0a8837a9f6620a", size = 5206043 }, + { url = "https://files.pythonhosted.org/packages/86/9d/3677a02e172dccd8dd3a941307621c0cbd7691d77cb435ac3c75ab6a3105/zstandard-0.23.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:59556bf80a7094d0cfb9f5e50bb2db27fefb75d5138bb16fb052b61b0e0eeeb0", size = 5667390 }, + { url = "https://files.pythonhosted.org/packages/41/7e/0012a02458e74a7ba122cd9cafe491facc602c9a17f590367da369929498/zstandard-0.23.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:27d3ef2252d2e62476389ca8f9b0cf2bbafb082a3b6bfe9d90cbcbb5529ecf7c", size = 5198901 }, + { url = 
"https://files.pythonhosted.org/packages/65/3a/8f715b97bd7bcfc7342d8adcd99a026cb2fb550e44866a3b6c348e1b0f02/zstandard-0.23.0-cp310-cp310-win32.whl", hash = "sha256:5d41d5e025f1e0bccae4928981e71b2334c60f580bdc8345f824e7c0a4c2a813", size = 430596 }, + { url = "https://files.pythonhosted.org/packages/19/b7/b2b9eca5e5a01111e4fe8a8ffb56bdcdf56b12448a24effe6cfe4a252034/zstandard-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:519fbf169dfac1222a76ba8861ef4ac7f0530c35dd79ba5727014613f91613d4", size = 495498 }, + { url = "https://files.pythonhosted.org/packages/9e/40/f67e7d2c25a0e2dc1744dd781110b0b60306657f8696cafb7ad7579469bd/zstandard-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:34895a41273ad33347b2fc70e1bff4240556de3c46c6ea430a7ed91f9042aa4e", size = 788699 }, + { url = "https://files.pythonhosted.org/packages/e8/46/66d5b55f4d737dd6ab75851b224abf0afe5774976fe511a54d2eb9063a41/zstandard-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77ea385f7dd5b5676d7fd943292ffa18fbf5c72ba98f7d09fc1fb9e819b34c23", size = 633681 }, + { url = "https://files.pythonhosted.org/packages/63/b6/677e65c095d8e12b66b8f862b069bcf1f1d781b9c9c6f12eb55000d57583/zstandard-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:983b6efd649723474f29ed42e1467f90a35a74793437d0bc64a5bf482bedfa0a", size = 4944328 }, + { url = "https://files.pythonhosted.org/packages/59/cc/e76acb4c42afa05a9d20827116d1f9287e9c32b7ad58cc3af0721ce2b481/zstandard-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80a539906390591dd39ebb8d773771dc4db82ace6372c4d41e2d293f8e32b8db", size = 5311955 }, + { url = "https://files.pythonhosted.org/packages/78/e4/644b8075f18fc7f632130c32e8f36f6dc1b93065bf2dd87f03223b187f26/zstandard-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:445e4cb5048b04e90ce96a79b4b63140e3f4ab5f662321975679b5f6360b90e2", size = 5344944 }, + { url = "https://files.pythonhosted.org/packages/76/3f/dbafccf19cfeca25bbabf6f2dd81796b7218f768ec400f043edc767015a6/zstandard-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd30d9c67d13d891f2360b2a120186729c111238ac63b43dbd37a5a40670b8ca", size = 5442927 }, + { url = "https://files.pythonhosted.org/packages/0c/c3/d24a01a19b6733b9f218e94d1a87c477d523237e07f94899e1c10f6fd06c/zstandard-0.23.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d20fd853fbb5807c8e84c136c278827b6167ded66c72ec6f9a14b863d809211c", size = 4864910 }, + { url = "https://files.pythonhosted.org/packages/1c/a9/cf8f78ead4597264f7618d0875be01f9bc23c9d1d11afb6d225b867cb423/zstandard-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed1708dbf4d2e3a1c5c69110ba2b4eb6678262028afd6c6fbcc5a8dac9cda68e", size = 4935544 }, + { url = "https://files.pythonhosted.org/packages/2c/96/8af1e3731b67965fb995a940c04a2c20997a7b3b14826b9d1301cf160879/zstandard-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:be9b5b8659dff1f913039c2feee1aca499cfbc19e98fa12bc85e037c17ec6ca5", size = 5467094 }, + { url = "https://files.pythonhosted.org/packages/ff/57/43ea9df642c636cb79f88a13ab07d92d88d3bfe3e550b55a25a07a26d878/zstandard-0.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65308f4b4890aa12d9b6ad9f2844b7ee42c7f7a4fd3390425b242ffc57498f48", size = 4860440 }, + { url = 
"https://files.pythonhosted.org/packages/46/37/edb78f33c7f44f806525f27baa300341918fd4c4af9472fbc2c3094be2e8/zstandard-0.23.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98da17ce9cbf3bfe4617e836d561e433f871129e3a7ac16d6ef4c680f13a839c", size = 4700091 }, + { url = "https://files.pythonhosted.org/packages/c1/f1/454ac3962671a754f3cb49242472df5c2cced4eb959ae203a377b45b1a3c/zstandard-0.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8ed7d27cb56b3e058d3cf684d7200703bcae623e1dcc06ed1e18ecda39fee003", size = 5208682 }, + { url = "https://files.pythonhosted.org/packages/85/b2/1734b0fff1634390b1b887202d557d2dd542de84a4c155c258cf75da4773/zstandard-0.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:b69bb4f51daf461b15e7b3db033160937d3ff88303a7bc808c67bbc1eaf98c78", size = 5669707 }, + { url = "https://files.pythonhosted.org/packages/52/5a/87d6971f0997c4b9b09c495bf92189fb63de86a83cadc4977dc19735f652/zstandard-0.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034b88913ecc1b097f528e42b539453fa82c3557e414b3de9d5632c80439a473", size = 5201792 }, + { url = "https://files.pythonhosted.org/packages/79/02/6f6a42cc84459d399bd1a4e1adfc78d4dfe45e56d05b072008d10040e13b/zstandard-0.23.0-cp311-cp311-win32.whl", hash = "sha256:f2d4380bf5f62daabd7b751ea2339c1a21d1c9463f1feb7fc2bdcea2c29c3160", size = 430586 }, + { url = "https://files.pythonhosted.org/packages/be/a2/4272175d47c623ff78196f3c10e9dc7045c1b9caf3735bf041e65271eca4/zstandard-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:62136da96a973bd2557f06ddd4e8e807f9e13cbb0bfb9cc06cfe6d98ea90dfe0", size = 495420 }, + { url = "https://files.pythonhosted.org/packages/7b/83/f23338c963bd9de687d47bf32efe9fd30164e722ba27fb59df33e6b1719b/zstandard-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4567955a6bc1b20e9c31612e615af6b53733491aeaa19a6b3b37f3b65477094", size = 788713 }, + { url = "https://files.pythonhosted.org/packages/5b/b3/1a028f6750fd9227ee0b937a278a434ab7f7fdc3066c3173f64366fe2466/zstandard-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e172f57cd78c20f13a3415cc8dfe24bf388614324d25539146594c16d78fcc8", size = 633459 }, + { url = "https://files.pythonhosted.org/packages/26/af/36d89aae0c1f95a0a98e50711bc5d92c144939efc1f81a2fcd3e78d7f4c1/zstandard-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e166f698c5a3e914947388c162be2583e0c638a4703fc6a543e23a88dea3c1", size = 4945707 }, + { url = "https://files.pythonhosted.org/packages/cd/2e/2051f5c772f4dfc0aae3741d5fc72c3dcfe3aaeb461cc231668a4db1ce14/zstandard-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a289832e520c6bd4dcaad68e944b86da3bad0d339ef7989fb7e88f92e96072", size = 5306545 }, + { url = "https://files.pythonhosted.org/packages/0a/9e/a11c97b087f89cab030fa71206963090d2fecd8eb83e67bb8f3ffb84c024/zstandard-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d50d31bfedd53a928fed6707b15a8dbeef011bb6366297cc435accc888b27c20", size = 5337533 }, + { url = "https://files.pythonhosted.org/packages/fc/79/edeb217c57fe1bf16d890aa91a1c2c96b28c07b46afed54a5dcf310c3f6f/zstandard-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72c68dda124a1a138340fb62fa21b9bf4848437d9ca60bd35db36f2d3345f373", size = 5436510 }, + { url = 
"https://files.pythonhosted.org/packages/81/4f/c21383d97cb7a422ddf1ae824b53ce4b51063d0eeb2afa757eb40804a8ef/zstandard-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53dd9d5e3d29f95acd5de6802e909ada8d8d8cfa37a3ac64836f3bc4bc5512db", size = 4859973 }, + { url = "https://files.pythonhosted.org/packages/ab/15/08d22e87753304405ccac8be2493a495f529edd81d39a0870621462276ef/zstandard-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6a41c120c3dbc0d81a8e8adc73312d668cd34acd7725f036992b1b72d22c1772", size = 4936968 }, + { url = "https://files.pythonhosted.org/packages/eb/fa/f3670a597949fe7dcf38119a39f7da49a8a84a6f0b1a2e46b2f71a0ab83f/zstandard-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40b33d93c6eddf02d2c19f5773196068d875c41ca25730e8288e9b672897c105", size = 5467179 }, + { url = "https://files.pythonhosted.org/packages/4e/a9/dad2ab22020211e380adc477a1dbf9f109b1f8d94c614944843e20dc2a99/zstandard-0.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9206649ec587e6b02bd124fb7799b86cddec350f6f6c14bc82a2b70183e708ba", size = 4848577 }, + { url = "https://files.pythonhosted.org/packages/08/03/dd28b4484b0770f1e23478413e01bee476ae8227bbc81561f9c329e12564/zstandard-0.23.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76e79bc28a65f467e0409098fa2c4376931fd3207fbeb6b956c7c476d53746dd", size = 4693899 }, + { url = "https://files.pythonhosted.org/packages/2b/64/3da7497eb635d025841e958bcd66a86117ae320c3b14b0ae86e9e8627518/zstandard-0.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:66b689c107857eceabf2cf3d3fc699c3c0fe8ccd18df2219d978c0283e4c508a", size = 5199964 }, + { url = "https://files.pythonhosted.org/packages/43/a4/d82decbab158a0e8a6ebb7fc98bc4d903266bce85b6e9aaedea1d288338c/zstandard-0.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c236e635582742fee16603042553d276cca506e824fa2e6489db04039521e90", size = 5655398 }, + { url = "https://files.pythonhosted.org/packages/f2/61/ac78a1263bc83a5cf29e7458b77a568eda5a8f81980691bbc6eb6a0d45cc/zstandard-0.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8fffdbd9d1408006baaf02f1068d7dd1f016c6bcb7538682622c556e7b68e35", size = 5191313 }, + { url = "https://files.pythonhosted.org/packages/e7/54/967c478314e16af5baf849b6ee9d6ea724ae5b100eb506011f045d3d4e16/zstandard-0.23.0-cp312-cp312-win32.whl", hash = "sha256:dc1d33abb8a0d754ea4763bad944fd965d3d95b5baef6b121c0c9013eaf1907d", size = 430877 }, + { url = "https://files.pythonhosted.org/packages/75/37/872d74bd7739639c4553bf94c84af7d54d8211b626b352bc57f0fd8d1e3f/zstandard-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:64585e1dba664dc67c7cdabd56c1e5685233fbb1fc1966cfba2a340ec0dfff7b", size = 495595 }, + { url = "https://files.pythonhosted.org/packages/80/f1/8386f3f7c10261fe85fbc2c012fdb3d4db793b921c9abcc995d8da1b7a80/zstandard-0.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:576856e8594e6649aee06ddbfc738fec6a834f7c85bf7cadd1c53d4a58186ef9", size = 788975 }, + { url = "https://files.pythonhosted.org/packages/16/e8/cbf01077550b3e5dc86089035ff8f6fbbb312bc0983757c2d1117ebba242/zstandard-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38302b78a850ff82656beaddeb0bb989a0322a8bbb1bf1ab10c17506681d772a", size = 633448 }, + { url = "https://files.pythonhosted.org/packages/06/27/4a1b4c267c29a464a161aeb2589aff212b4db653a1d96bffe3598f3f0d22/zstandard-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d2240ddc86b74966c34554c49d00eaafa8200a18d3a5b6ffbf7da63b11d74ee2", size = 4945269 }, + { url = "https://files.pythonhosted.org/packages/7c/64/d99261cc57afd9ae65b707e38045ed8269fbdae73544fd2e4a4d50d0ed83/zstandard-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ef230a8fd217a2015bc91b74f6b3b7d6522ba48be29ad4ea0ca3a3775bf7dd5", size = 5306228 }, + { url = "https://files.pythonhosted.org/packages/7a/cf/27b74c6f22541f0263016a0fd6369b1b7818941de639215c84e4e94b2a1c/zstandard-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:774d45b1fac1461f48698a9d4b5fa19a69d47ece02fa469825b442263f04021f", size = 5336891 }, + { url = "https://files.pythonhosted.org/packages/fa/18/89ac62eac46b69948bf35fcd90d37103f38722968e2981f752d69081ec4d/zstandard-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f77fa49079891a4aab203d0b1744acc85577ed16d767b52fc089d83faf8d8ed", size = 5436310 }, + { url = "https://files.pythonhosted.org/packages/a8/a8/5ca5328ee568a873f5118d5b5f70d1f36c6387716efe2e369010289a5738/zstandard-0.23.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac184f87ff521f4840e6ea0b10c0ec90c6b1dcd0bad2f1e4a9a1b4fa177982ea", size = 4859912 }, + { url = "https://files.pythonhosted.org/packages/ea/ca/3781059c95fd0868658b1cf0440edd832b942f84ae60685d0cfdb808bca1/zstandard-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c363b53e257246a954ebc7c488304b5592b9c53fbe74d03bc1c64dda153fb847", size = 4936946 }, + { url = "https://files.pythonhosted.org/packages/ce/11/41a58986f809532742c2b832c53b74ba0e0a5dae7e8ab4642bf5876f35de/zstandard-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e7792606d606c8df5277c32ccb58f29b9b8603bf83b48639b7aedf6df4fe8171", size = 5466994 }, + { url = "https://files.pythonhosted.org/packages/83/e3/97d84fe95edd38d7053af05159465d298c8b20cebe9ccb3d26783faa9094/zstandard-0.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a0817825b900fcd43ac5d05b8b3079937073d2b1ff9cf89427590718b70dd840", size = 4848681 }, + { url = "https://files.pythonhosted.org/packages/6e/99/cb1e63e931de15c88af26085e3f2d9af9ce53ccafac73b6e48418fd5a6e6/zstandard-0.23.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9da6bc32faac9a293ddfdcb9108d4b20416219461e4ec64dfea8383cac186690", size = 4694239 }, + { url = "https://files.pythonhosted.org/packages/ab/50/b1e703016eebbc6501fc92f34db7b1c68e54e567ef39e6e59cf5fb6f2ec0/zstandard-0.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fd7699e8fd9969f455ef2926221e0233f81a2542921471382e77a9e2f2b57f4b", size = 5200149 }, + { url = "https://files.pythonhosted.org/packages/aa/e0/932388630aaba70197c78bdb10cce2c91fae01a7e553b76ce85471aec690/zstandard-0.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d477ed829077cd945b01fc3115edd132c47e6540ddcd96ca169facff28173057", size = 5655392 }, + { url = "https://files.pythonhosted.org/packages/02/90/2633473864f67a15526324b007a9f96c96f56d5f32ef2a56cc12f9548723/zstandard-0.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ce8b52c5987b3e34d5674b0ab529a4602b632ebab0a93b07bfb4dfc8f8a33", size = 5191299 }, + { url = "https://files.pythonhosted.org/packages/b0/4c/315ca5c32da7e2dc3455f3b2caee5c8c2246074a61aac6ec3378a97b7136/zstandard-0.23.0-cp313-cp313-win32.whl", hash = "sha256:a9b07268d0c3ca5c170a385a0ab9fb7fdd9f5fd866be004c4ea39e44edce47dd", size = 430862 }, + { url = 
"https://files.pythonhosted.org/packages/a2/bf/c6aaba098e2d04781e8f4f7c0ba3c7aa73d00e4c436bcc0cf059a66691d1/zstandard-0.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:f3513916e8c645d0610815c257cbfd3242adfd5c4cfa78be514e5a3ebb42a41b", size = 495578 }, + { url = "https://files.pythonhosted.org/packages/fb/96/4fcafeb7e013a2386d22f974b5b97a0b9a65004ed58c87ae001599bfbd48/zstandard-0.23.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa014d55c3af933c1315eb4bb06dd0459661cc0b15cd61077afa6489bec63bb", size = 788697 }, + { url = "https://files.pythonhosted.org/packages/83/ff/a52ce725be69b86a2967ecba0497a8184540cc284c0991125515449e54e2/zstandard-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7f0804bb3799414af278e9ad51be25edf67f78f916e08afdb983e74161b916", size = 633679 }, + { url = "https://files.pythonhosted.org/packages/34/0f/3dc62db122f6a9c481c335fff6fc9f4e88d8f6e2d47321ee3937328addb4/zstandard-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb2b1ecfef1e67897d336de3a0e3f52478182d6a47eda86cbd42504c5cbd009a", size = 4940416 }, + { url = "https://files.pythonhosted.org/packages/1d/e5/9fe0dd8c85fdc2f635e6660d07872a5dc4b366db566630161e39f9f804e1/zstandard-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:837bb6764be6919963ef41235fd56a6486b132ea64afe5fafb4cb279ac44f259", size = 5307693 }, + { url = "https://files.pythonhosted.org/packages/73/bf/fe62c0cd865c171ee8ed5bc83174b5382a2cb729c8d6162edfb99a83158b/zstandard-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1516c8c37d3a053b01c1c15b182f3b5f5eef19ced9b930b684a73bad121addf4", size = 5341236 }, + { url = "https://files.pythonhosted.org/packages/39/86/4fe79b30c794286110802a6cd44a73b6a314ac8196b9338c0fbd78c2407d/zstandard-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48ef6a43b1846f6025dde6ed9fee0c24e1149c1c25f7fb0a0585572b2f3adc58", size = 5439101 }, + { url = "https://files.pythonhosted.org/packages/72/ed/cacec235c581ebf8c608c7fb3d4b6b70d1b490d0e5128ea6996f809ecaef/zstandard-0.23.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11e3bf3c924853a2d5835b24f03eeba7fc9b07d8ca499e247e06ff5676461a15", size = 4860320 }, + { url = "https://files.pythonhosted.org/packages/f6/1e/2c589a2930f93946b132fc852c574a19d5edc23fad2b9e566f431050c7ec/zstandard-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2fb4535137de7e244c230e24f9d1ec194f61721c86ebea04e1581d9d06ea1269", size = 4931933 }, + { url = "https://files.pythonhosted.org/packages/8e/f5/30eadde3686d902b5d4692bb5f286977cbc4adc082145eb3f49d834b2eae/zstandard-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c24f21fa2af4bb9f2c492a86fe0c34e6d2c63812a839590edaf177b7398f700", size = 5463878 }, + { url = "https://files.pythonhosted.org/packages/e0/c8/8aed1f0ab9854ef48e5ad4431367fcb23ce73f0304f7b72335a8edc66556/zstandard-0.23.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a8c86881813a78a6f4508ef9daf9d4995b8ac2d147dcb1a450448941398091c9", size = 4857192 }, + { url = "https://files.pythonhosted.org/packages/a8/c6/55e666cfbcd032b9e271865e8578fec56e5594d4faeac379d371526514f5/zstandard-0.23.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe3b385d996ee0822fd46528d9f0443b880d4d05528fd26a9119a54ec3f91c69", size = 4696513 }, + { url = "https://files.pythonhosted.org/packages/dc/bd/720b65bea63ec9de0ac7414c33b9baf271c8de8996e5ff324dc93fc90ff1/zstandard-0.23.0-cp39-cp39-musllinux_1_2_ppc64le.whl", 
hash = "sha256:82d17e94d735c99621bf8ebf9995f870a6b3e6d14543b99e201ae046dfe7de70", size = 5204823 }, + { url = "https://files.pythonhosted.org/packages/d8/40/d678db1556e3941d330cd4e95623a63ef235b18547da98fa184cbc028ecf/zstandard-0.23.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c7c517d74bea1a6afd39aa612fa025e6b8011982a0897768a2f7c8ab4ebb78a2", size = 5666490 }, + { url = "https://files.pythonhosted.org/packages/ed/cc/c89329723d7515898a1fc7ef5d251264078548c505719d13e9511800a103/zstandard-0.23.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fd7e0f1cfb70eb2f95a19b472ee7ad6d9a0a992ec0ae53286870c104ca939e5", size = 5196622 }, + { url = "https://files.pythonhosted.org/packages/78/4c/634289d41e094327a94500dfc919e58841b10ea3a9efdfafbac614797ec2/zstandard-0.23.0-cp39-cp39-win32.whl", hash = "sha256:43da0f0092281bf501f9c5f6f3b4c975a8a0ea82de49ba3f7100e64d422a1274", size = 430620 }, + { url = "https://files.pythonhosted.org/packages/a2/e2/0b0c5a0f4f7699fecd92c1ba6278ef9b01f2b0b0dd46f62bfc6729c05659/zstandard-0.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:f8346bfa098532bc1fb6c7ef06783e969d87a99dd1d2a5a18a892c1d7a643c58", size = 495528 }, +] From 089aa63fc4ed879ff7f90da37dc39e5cd815bfa9 Mon Sep 17 00:00:00 2001 From: Robert Volkmann <20912167+robertvolkmann@users.noreply.github.com> Date: Mon, 4 Nov 2024 19:32:29 +0100 Subject: [PATCH 021/137] Rebuild pgbouncer image with newer version (#42349) Bump base image because alpine 3.14 is end-of-live --- chart/dockerfiles/pgbouncer/Dockerfile | 4 ++-- chart/dockerfiles/pgbouncer/build_and_push.sh | 6 +++--- chart/values.schema.json | 2 +- chart/values.yaml | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/chart/dockerfiles/pgbouncer/Dockerfile b/chart/dockerfiles/pgbouncer/Dockerfile index f561fadc3ebb..fcc45da47ba9 100644 --- a/chart/dockerfiles/pgbouncer/Dockerfile +++ b/chart/dockerfiles/pgbouncer/Dockerfile @@ -14,7 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -ARG ALPINE_VERSION="3.14" +ARG ALPINE_VERSION="3.19" FROM alpine:${ALPINE_VERSION} AS builder SHELL ["/bin/ash", "-e", "-x", "-c", "-o", "pipefail"] @@ -26,7 +26,7 @@ ARG PGBOUNCER_SHA256 # Those are build deps only but still we want the latest versions of those # "Pin versions in apk add" https://github.com/hadolint/hadolint/wiki/DL3018 # hadolint ignore=DL3018 -RUN apk --no-cache add make pkgconfig build-base libtool wget gcc g++ libevent-dev libressl-dev c-ares-dev ca-certificates +RUN apk --no-cache add make pkgconfig build-base libtool wget gcc g++ libevent-dev openssl-dev c-ares-dev ca-certificates # We are not using Dash so we can safely ignore the "Dash warning" # "In dash, something is not supported." 
https://github.com/koalaman/shellcheck/wiki/SC2169 # hadolint ignore=SC2169,SC3060 diff --git a/chart/dockerfiles/pgbouncer/build_and_push.sh b/chart/dockerfiles/pgbouncer/build_and_push.sh index 24ab57882daf..cede5ab8b0ab 100755 --- a/chart/dockerfiles/pgbouncer/build_and_push.sh +++ b/chart/dockerfiles/pgbouncer/build_and_push.sh @@ -22,13 +22,13 @@ readonly DOCKERHUB_USER DOCKERHUB_REPO=${DOCKERHUB_REPO:="airflow"} readonly DOCKERHUB_REPO -PGBOUNCER_VERSION="1.21.0" +PGBOUNCER_VERSION="1.22.1" readonly PGBOUNCER_VERSION -PGBOUNCER_SHA256="7e1dd620c8d85a8490aff25061d5055d7aef9cf3e8bfe2d9e7719b8ee59114e2" +PGBOUNCER_SHA256="2b018aa6ce7f592c9892bb9e0fd90262484eb73937fd2af929770a45373ba215" readonly PGBOUNCER_SHA256 -AIRFLOW_PGBOUNCER_VERSION="2024.01.19" +AIRFLOW_PGBOUNCER_VERSION="2024.09.19" readonly AIRFLOW_PGBOUNCER_VERSION COMMIT_SHA=$(git rev-parse HEAD) diff --git a/chart/values.schema.json b/chart/values.schema.json index ea673a40c2d6..b59bb6dbff10 100644 --- a/chart/values.schema.json +++ b/chart/values.schema.json @@ -895,7 +895,7 @@ "tag": { "description": "The PgBouncer image tag.", "type": "string", - "default": "airflow-pgbouncer-2024.01.19-1.21.0" + "default": "airflow-pgbouncer-2024.09.19-1.22.1" }, "pullPolicy": { "description": "The PgBouncer image pull policy.", diff --git a/chart/values.yaml b/chart/values.yaml index 6c3b2aeb9bd1..11a8dc035f14 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -115,7 +115,7 @@ images: pullPolicy: IfNotPresent pgbouncer: repository: apache/airflow - tag: airflow-pgbouncer-2024.01.19-1.21.0 + tag: airflow-pgbouncer-2024.09.19-1.22.1 pullPolicy: IfNotPresent pgbouncerExporter: repository: apache/airflow From 07e6ada4bc4f0f67c24694c35060d1bb00d45700 Mon Sep 17 00:00:00 2001 From: Jens Scheffler <95105677+jscheffl@users.noreply.github.com> Date: Mon, 4 Nov 2024 21:33:34 +0100 Subject: [PATCH 022/137] Fix venv numpy example which needs to be 1.26 at least to be working in Python 3.12 (#43653) (cherry picked from commit 02d0cac5d8dfef1b1a3813983c9279c32f59d711) --- airflow/example_dags/example_branch_operator_decorator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/airflow/example_dags/example_branch_operator_decorator.py b/airflow/example_dags/example_branch_operator_decorator.py index e9b3bea97a72..95bfba4cdc60 100644 --- a/airflow/example_dags/example_branch_operator_decorator.py +++ b/airflow/example_dags/example_branch_operator_decorator.py @@ -112,7 +112,7 @@ def some_ext_py_task(): # Run the example a second time and see that it re-uses it and is faster. VENV_CACHE_PATH = tempfile.gettempdir() - @task.branch_virtualenv(requirements=["numpy~=1.24.4"], venv_cache_path=VENV_CACHE_PATH) + @task.branch_virtualenv(requirements=["numpy~=1.26.0"], venv_cache_path=VENV_CACHE_PATH) def branching_virtualenv(choices) -> str: import random @@ -132,7 +132,7 @@ def branching_virtualenv(choices) -> str: for option in options: @task.virtualenv( - task_id=f"venv_{option}", requirements=["numpy~=1.24.4"], venv_cache_path=VENV_CACHE_PATH + task_id=f"venv_{option}", requirements=["numpy~=1.26.0"], venv_cache_path=VENV_CACHE_PATH ) def some_venv_task(): import numpy as np From c65a10f32193cd914259d3292e72b9a9e90a44ba Mon Sep 17 00:00:00 2001 From: Daniel Standish <15932138+dstandish@users.noreply.github.com> Date: Mon, 4 Nov 2024 13:06:55 -0800 Subject: [PATCH 023/137] Remove auth manager tests of backfill (#43649) Security is not yet implemented for fast api endpoints. 
--- .../api_endpoints/test_backfill_endpoint.py | 269 ------------------ 1 file changed, 269 deletions(-) delete mode 100644 providers/tests/fab/auth_manager/api_endpoints/test_backfill_endpoint.py diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_backfill_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_backfill_endpoint.py deleted file mode 100644 index d09f94c46534..000000000000 --- a/providers/tests/fab/auth_manager/api_endpoints/test_backfill_endpoint.py +++ /dev/null @@ -1,269 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -import os -from datetime import datetime -from unittest import mock - -import pendulum -import pytest - -from airflow.models import DagBag, DagModel -from airflow.models.dag import DAG -from airflow.models.serialized_dag import SerializedDagModel -from airflow.operators.empty import EmptyOperator -from airflow.security import permissions -from airflow.utils import timezone -from airflow.utils.session import provide_session - -from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests_common.test_utils.db import ( - clear_db_backfills, - clear_db_dags, - clear_db_runs, - clear_db_serialized_dags, -) - -try: - from airflow.models.backfill import Backfill -except ImportError: - if AIRFLOW_V_3_0_PLUS: - raise - else: - pass - -pytestmark = [ - pytest.mark.db_test, - pytest.mark.skipif(not AIRFLOW_V_3_0_PLUS, reason="Test requires Airflow 3.0+"), -] - - -DAG_ID = "test_dag" -TASK_ID = "op1" -DAG2_ID = "test_dag2" -DAG3_ID = "test_dag3" -UTC_JSON_REPR = "UTC" if pendulum.__version__.startswith("3") else "Timezone('UTC')" - - -@pytest.fixture(scope="module") -def configured_app(minimal_app_for_auth_api): - app = minimal_app_for_auth_api - - create_user(app, username="test_granular_permissions", role_name="TestGranularDag") - app.appbuilder.sm.sync_perm_for_dag( - "TEST_DAG_1", - access_control={ - "TestGranularDag": { - permissions.RESOURCE_DAG: {permissions.ACTION_CAN_EDIT, permissions.ACTION_CAN_READ} - }, - }, - ) - - with DAG( - DAG_ID, - schedule=None, - start_date=datetime(2020, 6, 15), - doc_md="details", - params={"foo": 1}, - tags=["example"], - ) as dag: - EmptyOperator(task_id=TASK_ID) - - with DAG(DAG2_ID, schedule=None, start_date=datetime(2020, 6, 15)) as dag2: # no doc_md - EmptyOperator(task_id=TASK_ID) - - with DAG(DAG3_ID, schedule=None) as dag3: # DAG start_date set to None - EmptyOperator(task_id=TASK_ID, start_date=datetime(2019, 6, 12)) - - dag_bag = DagBag(os.devnull, include_examples=False) - dag_bag.dags = {dag.dag_id: dag, dag2.dag_id: dag2, dag3.dag_id: dag3} - - app.dag_bag = dag_bag - - yield app - - 
delete_user(app, username="test_granular_permissions") - - -class TestBackfillEndpoint: - @staticmethod - def clean_db(): - clear_db_backfills() - clear_db_runs() - clear_db_dags() - clear_db_serialized_dags() - - @pytest.fixture(autouse=True) - def setup_attrs(self, configured_app) -> None: - self.clean_db() - self.app = configured_app - self.client = self.app.test_client() # type:ignore - self.dag_id = DAG_ID - self.dag2_id = DAG2_ID - self.dag3_id = DAG3_ID - - def teardown_method(self) -> None: - self.clean_db() - - @provide_session - def _create_dag_models(self, *, count=1, dag_id_prefix="TEST_DAG", is_paused=False, session=None): - dags = [] - for num in range(1, count + 1): - dag_model = DagModel( - dag_id=f"{dag_id_prefix}_{num}", - fileloc=f"/tmp/dag_{num}.py", - is_active=True, - timetable_summary="0 0 * * *", - is_paused=is_paused, - ) - session.add(dag_model) - dags.append(dag_model) - return dags - - @provide_session - def _create_deactivated_dag(self, session=None): - dag_model = DagModel( - dag_id="TEST_DAG_DELETED_1", - fileloc="/tmp/dag_del_1.py", - schedule_interval="2 2 * * *", - is_active=False, - ) - session.add(dag_model) - - -class TestListBackfills(TestBackfillEndpoint): - def test_should_respond_200_with_granular_dag_access(self, session): - (dag,) = self._create_dag_models() - from_date = timezone.utcnow() - to_date = timezone.utcnow() - b = Backfill( - dag_id=dag.dag_id, - from_date=from_date, - to_date=to_date, - ) - - session.add(b) - session.commit() - kwargs = {} - kwargs.update(environ_overrides={"REMOTE_USER": "test_granular_permissions"}) - response = self.client.get("/api/v1/backfills?dag_id=TEST_DAG_1", **kwargs) - assert response.status_code == 200 - - -class TestGetBackfill(TestBackfillEndpoint): - def test_should_respond_200_with_granular_dag_access(self, session): - (dag,) = self._create_dag_models() - from_date = timezone.utcnow() - to_date = timezone.utcnow() - backfill = Backfill( - dag_id=dag.dag_id, - from_date=from_date, - to_date=to_date, - ) - session.add(backfill) - session.commit() - kwargs = {} - kwargs.update(environ_overrides={"REMOTE_USER": "test_granular_permissions"}) - response = self.client.get(f"/api/v1/backfills/{backfill.id}", **kwargs) - assert response.status_code == 200 - - -class TestCreateBackfill(TestBackfillEndpoint): - def test_create_backfill(self, session, dag_maker): - with dag_maker(session=session, dag_id="TEST_DAG_1", schedule="0 * * * *") as dag: - EmptyOperator(task_id="mytask") - session.add(SerializedDagModel(dag)) - session.commit() - session.query(DagModel).all() - from_date = pendulum.parse("2024-01-01") - from_date_iso = from_date.isoformat() - to_date = pendulum.parse("2024-02-01") - to_date_iso = to_date.isoformat() - max_active_runs = 5 - data = { - "dag_id": dag.dag_id, - "from_date": f"{from_date_iso}", - "to_date": f"{to_date_iso}", - "max_active_runs": max_active_runs, - "reverse": False, - } - kwargs = {} - kwargs.update(environ_overrides={"REMOTE_USER": "test_granular_permissions"}) - - response = self.client.post( - "/api/v1/backfills", - **kwargs, - json=data, - ) - assert response.status_code == 200 - assert response.json == { - "completed_at": mock.ANY, - "created_at": mock.ANY, - "dag_id": "TEST_DAG_1", - "dag_run_conf": None, - "from_date": from_date_iso, - "id": mock.ANY, - "is_paused": False, - "reprocess_behavior": "none", - "max_active_runs": 5, - "to_date": to_date_iso, - "updated_at": mock.ANY, - } - - -class TestPauseBackfill(TestBackfillEndpoint): - def 
test_should_respond_200_with_granular_dag_access(self, session): - (dag,) = self._create_dag_models() - from_date = timezone.utcnow() - to_date = timezone.utcnow() - backfill = Backfill( - dag_id=dag.dag_id, - from_date=from_date, - to_date=to_date, - ) - session.add(backfill) - session.commit() - kwargs = {} - kwargs.update(environ_overrides={"REMOTE_USER": "test_granular_permissions"}) - response = self.client.post(f"/api/v1/backfills/{backfill.id}/pause", **kwargs) - assert response.status_code == 200 - - -class TestCancelBackfill(TestBackfillEndpoint): - def test_should_respond_200_with_granular_dag_access(self, session): - (dag,) = self._create_dag_models() - from_date = timezone.utcnow() - to_date = timezone.utcnow() - backfill = Backfill( - dag_id=dag.dag_id, - from_date=from_date, - to_date=to_date, - ) - session.add(backfill) - session.commit() - kwargs = {} - kwargs.update(environ_overrides={"REMOTE_USER": "test_granular_permissions"}) - response = self.client.post(f"/api/v1/backfills/{backfill.id}/cancel", **kwargs) - assert response.status_code == 200 - # now it is marked as completed - assert pendulum.parse(response.json["completed_at"]) - - # get conflict when canceling already-canceled backfill - response = self.client.post(f"/api/v1/backfills/{backfill.id}/cancel", **kwargs) - assert response.status_code == 409 From 286075f05932db9480c1a1e2260b7327247562de Mon Sep 17 00:00:00 2001 From: Jens Scheffler <95105677+jscheffl@users.noreply.github.com> Date: Mon, 4 Nov 2024 22:27:24 +0100 Subject: [PATCH 024/137] Add UV support to venv operators (#43612) * Add UV support to venv operators * Uups, allow creation also when requirements+pip.conf are used * Fix venv numpy example which needs to be 1.26 at least to be working in Python 3.12 * Review feedback and pytests * Fix pytests * Revert fix in examples * Add newsfragment * Update providers/src/airflow/providers/standard/provider.yaml Co-authored-by: GPK * Update providers/src/airflow/providers/standard/utils/python_virtualenv.py Co-authored-by: GPK --------- Co-authored-by: GPK --- newsfragments/43612.significant.rst | 8 ++ .../airflow/providers/standard/provider.yaml | 17 ++++ .../standard/utils/python_virtualenv.py | 84 ++++++++++++++-- .../standard/utils/test_python_virtualenv.py | 97 +++++++++++++++++-- 4 files changed, 190 insertions(+), 16 deletions(-) create mode 100644 newsfragments/43612.significant.rst diff --git a/newsfragments/43612.significant.rst b/newsfragments/43612.significant.rst new file mode 100644 index 000000000000..8c0ec597c117 --- /dev/null +++ b/newsfragments/43612.significant.rst @@ -0,0 +1,8 @@ +Virtualenv installation uses ``uv`` now per default if ``uv`` is available. + +If you want to control how the virtualenv is created, you can use the +AIRFLOW__STANDARD__VENV_INSTALL_METHOD option. The possible values are: + +- ``auto``: Automatically select, use ``uv`` if available, otherwise use ``pip``. +- ``pip``: Use pip to install the virtual environment. +- ``uv``: Use uv to install the virtual environment. Must be available in environment PATH. 
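As a quick illustration of the new option (a minimal sketch; ``uv`` is only one of the documented values, and ``auto`` remains the default), the install method can be pinned either in ``airflow.cfg`` or through the matching environment variable:

    [standard]
    venv_install_method = uv

    # or equivalently, via the environment:
    export AIRFLOW__STANDARD__VENV_INSTALL_METHOD=uv

With the default ``auto``, hosts without ``uv`` on PATH keep the previous ``pip`` behaviour.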
diff --git a/providers/src/airflow/providers/standard/provider.yaml b/providers/src/airflow/providers/standard/provider.yaml index ee069165dfe3..007cfd812b7a 100644 --- a/providers/src/airflow/providers/standard/provider.yaml +++ b/providers/src/airflow/providers/standard/provider.yaml @@ -62,3 +62,20 @@ hooks: - airflow.providers.standard.hooks.filesystem - airflow.providers.standard.hooks.package_index - airflow.providers.standard.hooks.subprocess + +config: + standard: + description: Options for the standard provider operators. + options: + venv_install_method: + description: | + Which python tooling should be used to install the virtual environment. + + The following options are available: + - ``auto``: Automatically select, use ``uv`` if available, otherwise use ``pip``. + - ``pip``: Use pip to install the virtual environment. + - ``uv``: Use uv to install the virtual environment. Must be available in environment PATH. + version_added: ~ + type: string + example: uv + default: auto diff --git a/providers/src/airflow/providers/standard/utils/python_virtualenv.py b/providers/src/airflow/providers/standard/utils/python_virtualenv.py index b9c7be4c2832..9d03e43367a4 100644 --- a/providers/src/airflow/providers/standard/utils/python_virtualenv.py +++ b/providers/src/airflow/providers/standard/utils/python_virtualenv.py @@ -20,15 +20,51 @@ from __future__ import annotations import os +import shutil import sys from pathlib import Path import jinja2 from jinja2 import select_autoescape +from airflow.configuration import conf from airflow.utils.process_utils import execute_in_subprocess +def _is_uv_installed() -> bool: + """ + Verify whether the uv tool is installed by checking if it's included in the system PATH or installed as a package. + + :return: True if it is. Whichever way of checking it works, is fine. + """ + return bool(shutil.which("uv")) + + +def _use_uv() -> bool: + """ + Check if the uv tool should be used. + + :return: True if uv should be used. 
+ """ + venv_install_method = conf.get("standard", "venv_install_method", fallback="auto").lower() + if venv_install_method == "auto": + return _is_uv_installed() + elif venv_install_method == "uv": + return True + return False + + +def _generate_uv_cmd(tmp_dir: str, python_bin: str, system_site_packages: bool) -> list[str]: + """Build the command to install the venv via UV.""" + cmd = ["uv", "venv", "--allow-existing", "--seed"] + if python_bin is not None: + cmd += ["--python", python_bin] + if system_site_packages: + cmd.append("--system-site-packages") + cmd.append(tmp_dir) + return cmd + + def _generate_venv_cmd(tmp_dir: str, python_bin: str, system_site_packages: bool) -> list[str]: """We are using venv command instead of venv module to allow creation of venv for different python versions.""" if python_bin is None: @@ -39,12 +75,33 @@ def _generate_venv_cmd(tmp_dir: str, python_bin: str, system_site_packages: bool return cmd +def _generate_uv_install_cmd_from_file( + tmp_dir: str, requirements_file_path: str, pip_install_options: list[str] +) -> list[str]: + return [ + "uv", + "pip", + "install", + "--python", + f"{tmp_dir}/bin/python", + *pip_install_options, + "-r", + requirements_file_path, + ] + + def _generate_pip_install_cmd_from_file( tmp_dir: str, requirements_file_path: str, pip_install_options: list[str] ) -> list[str]: return [f"{tmp_dir}/bin/pip", "install", *pip_install_options, "-r", requirements_file_path] +def _generate_uv_install_cmd_from_list( + tmp_dir: str, requirements: list[str], pip_install_options: list[str] +) -> list[str]: + return ["uv", "pip", "install", "--python", f"{tmp_dir}/bin/python", *pip_install_options, *requirements] + + def _generate_pip_install_cmd_from_list( tmp_dir: str, requirements: list[str], pip_install_options: list[str] ) -> list[str]: @@ -88,22 +145,33 @@ def prepare_virtualenv( if pip_install_options is None: pip_install_options = [] + if requirements is not None and requirements_file_path is not None: + raise ValueError("Either requirements OR requirements_file_path has to be passed, but not both") + if index_urls is not None: _generate_pip_conf(Path(venv_directory) / "pip.conf", index_urls) - venv_cmd = _generate_venv_cmd(venv_directory, python_bin, system_site_packages) + if _use_uv(): + venv_cmd = _generate_uv_cmd(venv_directory, python_bin, system_site_packages) + else: + venv_cmd = _generate_venv_cmd(venv_directory, python_bin, system_site_packages) execute_in_subprocess(venv_cmd) - if requirements is not None and requirements_file_path is not None: - raise ValueError("Either requirements OR requirements_file_path has to be passed, but not both") - pip_cmd = None if requirements is not None and len(requirements) != 0: - pip_cmd = _generate_pip_install_cmd_from_list(venv_directory, requirements, pip_install_options) + if _use_uv(): + pip_cmd = _generate_uv_install_cmd_from_list(venv_directory, requirements, pip_install_options) + else: + pip_cmd = _generate_pip_install_cmd_from_list(venv_directory, requirements, pip_install_options) if requirements_file_path is not None and requirements_file_path: - pip_cmd = _generate_pip_install_cmd_from_file( - venv_directory, requirements_file_path, pip_install_options - ) + if _use_uv(): + pip_cmd = _generate_uv_install_cmd_from_file( + venv_directory, requirements_file_path, pip_install_options + ) + else: + pip_cmd = _generate_pip_install_cmd_from_file( + venv_directory, requirements_file_path, pip_install_options + ) if pip_cmd: execute_in_subprocess(pip_cmd) diff --git 
a/providers/tests/standard/utils/test_python_virtualenv.py b/providers/tests/standard/utils/test_python_virtualenv.py index da9cf757d74f..0e10dcf5305c 100644 --- a/providers/tests/standard/utils/test_python_virtualenv.py +++ b/providers/tests/standard/utils/test_python_virtualenv.py @@ -22,11 +22,28 @@ import pytest -from airflow.providers.standard.utils.python_virtualenv import _generate_pip_conf, prepare_virtualenv +from airflow.providers.standard.utils.python_virtualenv import _generate_pip_conf, _use_uv, prepare_virtualenv from airflow.utils.decorators import remove_task_decorator +from tests_common.test_utils.config import conf_vars + class TestPrepareVirtualenv: + @mock.patch("shutil.which") + def test_use_uv(self, mock_shutil_which): + with conf_vars({("standard", "venv_install_method"): "auto"}): + mock_shutil_which.side_effect = [True] + assert _use_uv() is True + + mock_shutil_which.side_effect = [False] + assert _use_uv() is False + + with conf_vars({("standard", "venv_install_method"): "uv"}): + assert _use_uv() is True + + with conf_vars({("standard", "venv_install_method"): "pip"}): + assert _use_uv() is False + @pytest.mark.parametrize( ("index_urls", "expected_pip_conf_content", "unexpected_pip_conf_content"), [ @@ -60,7 +77,8 @@ def test_generate_pip_conf( assert term not in generated_conf @mock.patch("airflow.providers.standard.utils.python_virtualenv.execute_in_subprocess") - def test_should_create_virtualenv(self, mock_execute_in_subprocess): + @conf_vars({("standard", "venv_install_method"): "pip"}) + def test_should_create_virtualenv_pip(self, mock_execute_in_subprocess): python_bin = prepare_virtualenv( venv_directory="/VENV", python_bin="pythonVER", system_site_packages=False, requirements=[] ) @@ -68,7 +86,19 @@ def test_should_create_virtualenv(self, mock_execute_in_subprocess): mock_execute_in_subprocess.assert_called_once_with(["pythonVER", "-m", "venv", "/VENV"]) @mock.patch("airflow.providers.standard.utils.python_virtualenv.execute_in_subprocess") - def test_should_create_virtualenv_with_system_packages(self, mock_execute_in_subprocess): + @conf_vars({("standard", "venv_install_method"): "uv"}) + def test_should_create_virtualenv_uv(self, mock_execute_in_subprocess): + python_bin = prepare_virtualenv( + venv_directory="/VENV", python_bin="pythonVER", system_site_packages=False, requirements=[] + ) + assert "/VENV/bin/python" == python_bin + mock_execute_in_subprocess.assert_called_once_with( + ["uv", "venv", "--allow-existing", "--seed", "--python", "pythonVER", "/VENV"] + ) + + @mock.patch("airflow.providers.standard.utils.python_virtualenv.execute_in_subprocess") + @conf_vars({("standard", "venv_install_method"): "pip"}) + def test_should_create_virtualenv_with_system_packages_pip(self, mock_execute_in_subprocess): python_bin = prepare_virtualenv( venv_directory="/VENV", python_bin="pythonVER", system_site_packages=True, requirements=[] ) @@ -78,7 +108,28 @@ def test_should_create_virtualenv_with_system_packages(self, mock_execute_in_sub ) @mock.patch("airflow.providers.standard.utils.python_virtualenv.execute_in_subprocess") - def test_pip_install_options(self, mock_execute_in_subprocess): + @conf_vars({("standard", "venv_install_method"): "uv"}) + def test_should_create_virtualenv_with_system_packages_uv(self, mock_execute_in_subprocess): + python_bin = prepare_virtualenv( + venv_directory="/VENV", python_bin="pythonVER", system_site_packages=True, requirements=[] + ) + assert "/VENV/bin/python" == python_bin + 
mock_execute_in_subprocess.assert_called_once_with( + [ + "uv", + "venv", + "--allow-existing", + "--seed", + "--python", + "pythonVER", + "--system-site-packages", + "/VENV", + ] + ) + + @mock.patch("airflow.providers.standard.utils.python_virtualenv.execute_in_subprocess") + @conf_vars({("standard", "venv_install_method"): "pip"}) + def test_pip_install_options_pip(self, mock_execute_in_subprocess): pip_install_options = ["--no-deps"] python_bin = prepare_virtualenv( venv_directory="/VENV", @@ -89,15 +140,30 @@ def test_pip_install_options(self, mock_execute_in_subprocess): ) assert "/VENV/bin/python" == python_bin - mock_execute_in_subprocess.assert_any_call( - ["pythonVER", "-m", "venv", "/VENV", "--system-site-packages"] - ) mock_execute_in_subprocess.assert_called_with( ["/VENV/bin/pip", "install", *pip_install_options, "apache-beam[gcp]"] ) @mock.patch("airflow.providers.standard.utils.python_virtualenv.execute_in_subprocess") - def test_should_create_virtualenv_with_extra_packages(self, mock_execute_in_subprocess): + @conf_vars({("standard", "venv_install_method"): "uv"}) + def test_pip_install_options_uv(self, mock_execute_in_subprocess): + pip_install_options = ["--no-deps"] + python_bin = prepare_virtualenv( + venv_directory="/VENV", + python_bin="pythonVER", + system_site_packages=True, + requirements=["apache-beam[gcp]"], + pip_install_options=pip_install_options, + ) + + assert "/VENV/bin/python" == python_bin + mock_execute_in_subprocess.assert_called_with( + ["uv", "pip", "install", "--python", "/VENV/bin/python", *pip_install_options, "apache-beam[gcp]"] + ) + + @mock.patch("airflow.providers.standard.utils.python_virtualenv.execute_in_subprocess") + @conf_vars({("standard", "venv_install_method"): "pip"}) + def test_should_create_virtualenv_with_extra_packages_pip(self, mock_execute_in_subprocess): python_bin = prepare_virtualenv( venv_directory="/VENV", python_bin="pythonVER", @@ -110,6 +176,21 @@ def test_should_create_virtualenv_with_extra_packages(self, mock_execute_in_subp mock_execute_in_subprocess.assert_called_with(["/VENV/bin/pip", "install", "apache-beam[gcp]"]) + @mock.patch("airflow.providers.standard.utils.python_virtualenv.execute_in_subprocess") + @conf_vars({("standard", "venv_install_method"): "uv"}) + def test_should_create_virtualenv_with_extra_packages_uv(self, mock_execute_in_subprocess): + python_bin = prepare_virtualenv( + venv_directory="/VENV", + python_bin="pythonVER", + system_site_packages=False, + requirements=["apache-beam[gcp]"], + ) + assert "/VENV/bin/python" == python_bin + + mock_execute_in_subprocess.assert_called_with( + ["uv", "pip", "install", "--python", "/VENV/bin/python", "apache-beam[gcp]"] + ) + def test_remove_task_decorator(self): py_source = "@task.virtualenv(use_dill=True)\ndef f():\nimport funcsigs" res = remove_task_decorator(python_source=py_source, task_decorator_name="@task.virtualenv") From 8b0ac8d5302623d5ec0e19d108f337f8916c1109 Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Mon, 4 Nov 2024 22:54:32 +0000 Subject: [PATCH 025/137] Pass Task Instance ids in API response (#43664) https://github.com/apache/airflow/pull/43243 added Task Instance "id" as primary key. This PR passes the same API to API responses. 
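In practical terms, REST clients now receive the task instance's surrogate key next to the existing composite identifiers. A trimmed, hypothetical payload (all values below are placeholders; the only change introduced here is the presence of the "id" field):

    {
      "id": "<task instance primary key>",
      "task_id": "op1",
      "dag_id": "test_dag",
      "dag_run_id": "run_1",
      "map_index": -1
    }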
--- airflow/api_fastapi/core_api/openapi/v1-generated.yaml | 4 ++++ airflow/api_fastapi/core_api/serializers/task_instances.py | 1 + airflow/ui/openapi-gen/requests/schemas.gen.ts | 5 +++++ airflow/ui/openapi-gen/requests/types.gen.ts | 1 + .../core_api/routes/public/test_task_instances.py | 6 ++++++ 5 files changed, 17 insertions(+) diff --git a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml index a82e34a3f5ee..abd7f0baf5ed 100644 --- a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml @@ -3667,6 +3667,9 @@ components: description: Schema for Scheduler info. TaskInstanceResponse: properties: + id: + type: string + title: Id task_id: type: string title: Task Id @@ -3787,6 +3790,7 @@ components: - type: 'null' type: object required: + - id - task_id - dag_id - dag_run_id diff --git a/airflow/api_fastapi/core_api/serializers/task_instances.py b/airflow/api_fastapi/core_api/serializers/task_instances.py index b8a10e8fb861..07ef42d8e24b 100644 --- a/airflow/api_fastapi/core_api/serializers/task_instances.py +++ b/airflow/api_fastapi/core_api/serializers/task_instances.py @@ -31,6 +31,7 @@ class TaskInstanceResponse(BaseModel): model_config = ConfigDict(populate_by_name=True) + id: str task_id: str dag_id: str run_id: str = Field(alias="dag_run_id") diff --git a/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow/ui/openapi-gen/requests/schemas.gen.ts index 5624358636cb..cf51451c98bd 100644 --- a/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -2300,6 +2300,10 @@ export const $SchedulerInfoSchema = { export const $TaskInstanceResponse = { properties: { + id: { + type: "string", + title: "Id", + }, task_id: { type: "string", title: "Task Id", @@ -2529,6 +2533,7 @@ export const $TaskInstanceResponse = { }, type: "object", required: [ + "id", "task_id", "dag_id", "dag_run_id", diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index 2c11dd03cc0a..18d5bc296eb2 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -536,6 +536,7 @@ export type SchedulerInfoSchema = { * TaskInstance serializer for responses. 
*/ export type TaskInstanceResponse = { + id: string; task_id: string; dag_id: string; dag_run_id: string; diff --git a/tests/api_fastapi/core_api/routes/public/test_task_instances.py b/tests/api_fastapi/core_api/routes/public/test_task_instances.py index 717f17ca278a..0f27abd567e2 100644 --- a/tests/api_fastapi/core_api/routes/public/test_task_instances.py +++ b/tests/api_fastapi/core_api/routes/public/test_task_instances.py @@ -19,6 +19,7 @@ import datetime as dt import urllib +from unittest import mock import pendulum import pytest @@ -180,6 +181,7 @@ def test_should_respond_200(self, test_client, session): "executor": None, "executor_config": "{}", "hostname": "", + "id": mock.ANY, "map_index": -1, "max_tries": 0, "note": "placeholder-note", @@ -237,6 +239,7 @@ def test_should_respond_200_with_task_state_in_deferred(self, test_client, sessi "executor": None, "executor_config": "{}", "hostname": "", + "id": mock.ANY, "map_index": -1, "max_tries": 0, "note": "placeholder-note", @@ -283,6 +286,7 @@ def test_should_respond_200_with_task_state_in_removed(self, test_client, sessio "executor": None, "executor_config": "{}", "hostname": "", + "id": mock.ANY, "map_index": -1, "max_tries": 0, "note": "placeholder-note", @@ -325,6 +329,7 @@ def test_should_respond_200_task_instance_with_rendered(self, test_client, sessi "executor": None, "executor_config": "{}", "hostname": "", + "id": mock.ANY, "map_index": -1, "max_tries": 0, "note": "placeholder-note", @@ -424,6 +429,7 @@ def test_should_respond_200_mapped_task_instance_with_rtif(self, test_client, se "executor": None, "executor_config": "{}", "hostname": "", + "id": mock.ANY, "map_index": map_index, "max_tries": 0, "note": "placeholder-note", From 3742b1ea9ee82e8bf5cef142232b5cc47faafe89 Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Mon, 4 Nov 2024 23:15:17 +0000 Subject: [PATCH 026/137] Replace HTTP status code with named constants (#43663) --- .../core_api/routes/public/backfills.py | 60 ++++++++++---- .../core_api/routes/public/connections.py | 22 +++-- .../core_api/routes/public/dag_run.py | 51 +++++++++--- .../core_api/routes/public/dag_sources.py | 16 +++- .../core_api/routes/public/dag_warning.py | 7 +- .../core_api/routes/public/dags.py | 80 +++++++++++++++---- .../core_api/routes/public/event_logs.py | 8 +- .../core_api/routes/public/pools.py | 54 ++++++++++--- .../core_api/routes/public/task_instances.py | 22 +++-- .../core_api/routes/public/variables.py | 49 +++++++++--- .../api_fastapi/core_api/routes/ui/assets.py | 6 +- 11 files changed, 289 insertions(+), 86 deletions(-) diff --git a/airflow/api_fastapi/core_api/routes/public/backfills.py b/airflow/api_fastapi/core_api/routes/public/backfills.py index f6fe531d0063..e9e93673b8af 100644 --- a/airflow/api_fastapi/core_api/routes/public/backfills.py +++ b/airflow/api_fastapi/core_api/routes/public/backfills.py @@ -16,7 +16,7 @@ # under the License. 
from __future__ import annotations -from fastapi import Depends, HTTPException +from fastapi import Depends, HTTPException, status from sqlalchemy import select, update from sqlalchemy.orm import Session from typing_extensions import Annotated @@ -47,7 +47,7 @@ @backfills_router.get( path="/", - responses=create_openapi_http_exception_doc([401, 403]), + responses=create_openapi_http_exception_doc([status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]), ) async def list_backfills( dag_id: str, @@ -77,7 +77,9 @@ async def list_backfills( @backfills_router.get( path="/{backfill_id}", - responses=create_openapi_http_exception_doc([401, 403, 404]), + responses=create_openapi_http_exception_doc( + [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN, status.HTTP_404_NOT_FOUND] + ), ) async def get_backfill( backfill_id: str, @@ -86,19 +88,26 @@ async def get_backfill( backfill = session.get(Backfill, backfill_id) if backfill: return BackfillResponse.model_validate(backfill, from_attributes=True) - raise HTTPException(404, "Backfill not found") + raise HTTPException(status.HTTP_404_NOT_FOUND, "Backfill not found") @backfills_router.put( path="/{backfill_id}/pause", - responses=create_openapi_http_exception_doc([401, 403, 404, 409]), + responses=create_openapi_http_exception_doc( + [ + status.HTTP_401_UNAUTHORIZED, + status.HTTP_403_FORBIDDEN, + status.HTTP_404_NOT_FOUND, + status.HTTP_409_CONFLICT, + ] + ), ) async def pause_backfill(*, backfill_id, session: Annotated[Session, Depends(get_session)]): b = session.get(Backfill, backfill_id) if not b: - raise HTTPException(404, f"Could not find backfill with id {backfill_id}") + raise HTTPException(status.HTTP_404_NOT_FOUND, f"Could not find backfill with id {backfill_id}") if b.completed_at: - raise HTTPException(409, "Backfill is already completed.") + raise HTTPException(status.HTTP_409_CONFLICT, "Backfill is already completed.") if b.is_paused is False: b.is_paused = True session.commit() @@ -107,14 +116,21 @@ async def pause_backfill(*, backfill_id, session: Annotated[Session, Depends(get @backfills_router.put( path="/{backfill_id}/unpause", - responses=create_openapi_http_exception_doc([401, 403, 404, 409]), + responses=create_openapi_http_exception_doc( + [ + status.HTTP_401_UNAUTHORIZED, + status.HTTP_403_FORBIDDEN, + status.HTTP_404_NOT_FOUND, + status.HTTP_409_CONFLICT, + ] + ), ) async def unpause_backfill(*, backfill_id, session: Annotated[Session, Depends(get_session)]): b = session.get(Backfill, backfill_id) if not b: - raise HTTPException(404, f"Could not find backfill with id {backfill_id}") + raise HTTPException(status.HTTP_404_NOT_FOUND, f"Could not find backfill with id {backfill_id}") if b.completed_at: - raise HTTPException(409, "Backfill is already completed.") + raise HTTPException(status.HTTP_409_CONFLICT, "Backfill is already completed.") if b.is_paused: b.is_paused = False return BackfillResponse.model_validate(b, from_attributes=True) @@ -122,14 +138,21 @@ async def unpause_backfill(*, backfill_id, session: Annotated[Session, Depends(g @backfills_router.put( path="/{backfill_id}/cancel", - responses=create_openapi_http_exception_doc([401, 403, 404, 409]), + responses=create_openapi_http_exception_doc( + [ + status.HTTP_401_UNAUTHORIZED, + status.HTTP_403_FORBIDDEN, + status.HTTP_404_NOT_FOUND, + status.HTTP_409_CONFLICT, + ] + ), ) async def cancel_backfill(*, backfill_id, session: Annotated[Session, Depends(get_session)]): b: Backfill = session.get(Backfill, backfill_id) if not b: - raise HTTPException(404, f"Could 
not find backfill with id {backfill_id}") + raise HTTPException(status.HTTP_404_NOT_FOUND, f"Could not find backfill with id {backfill_id}") if b.completed_at is not None: - raise HTTPException(409, "Backfill is already completed.") + raise HTTPException(status.HTTP_409_CONFLICT, "Backfill is already completed.") # first, pause, and commit immediately to ensure no other dag runs are started if not b.is_paused: @@ -162,7 +185,14 @@ async def cancel_backfill(*, backfill_id, session: Annotated[Session, Depends(ge @backfills_router.post( path="/", - responses=create_openapi_http_exception_doc([401, 403, 404, 409]), + responses=create_openapi_http_exception_doc( + [ + status.HTTP_401_UNAUTHORIZED, + status.HTTP_403_FORBIDDEN, + status.HTTP_404_NOT_FOUND, + status.HTTP_409_CONFLICT, + ] + ), ) async def create_backfill( backfill_request: BackfillPostBody, @@ -182,6 +212,6 @@ async def create_backfill( return BackfillResponse.model_validate(backfill_obj, from_attributes=True) except AlreadyRunningBackfill: raise HTTPException( - status_code=409, + status_code=status.HTTP_409_CONFLICT, detail=f"There is already a running backfill for dag {backfill_request.dag_id}", ) diff --git a/airflow/api_fastapi/core_api/routes/public/connections.py b/airflow/api_fastapi/core_api/routes/public/connections.py index 60898d2a63a6..8d9f9ddb8ebf 100644 --- a/airflow/api_fastapi/core_api/routes/public/connections.py +++ b/airflow/api_fastapi/core_api/routes/public/connections.py @@ -16,7 +16,7 @@ # under the License. from __future__ import annotations -from fastapi import Depends, HTTPException +from fastapi import Depends, HTTPException, status from sqlalchemy import select from sqlalchemy.orm import Session from typing_extensions import Annotated @@ -37,7 +37,9 @@ @connections_router.delete( "/{connection_id}", status_code=204, - responses=create_openapi_http_exception_doc([401, 403, 404]), + responses=create_openapi_http_exception_doc( + [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN, status.HTTP_404_NOT_FOUND] + ), ) async def delete_connection( connection_id: str, @@ -47,14 +49,18 @@ async def delete_connection( connection = session.scalar(select(Connection).filter_by(conn_id=connection_id)) if connection is None: - raise HTTPException(404, f"The Connection with connection_id: `{connection_id}` was not found") + raise HTTPException( + status.HTTP_404_NOT_FOUND, f"The Connection with connection_id: `{connection_id}` was not found" + ) session.delete(connection) @connections_router.get( "/{connection_id}", - responses=create_openapi_http_exception_doc([401, 403, 404]), + responses=create_openapi_http_exception_doc( + [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN, status.HTTP_404_NOT_FOUND] + ), ) async def get_connection( connection_id: str, @@ -64,14 +70,18 @@ async def get_connection( connection = session.scalar(select(Connection).filter_by(conn_id=connection_id)) if connection is None: - raise HTTPException(404, f"The Connection with connection_id: `{connection_id}` was not found") + raise HTTPException( + status.HTTP_404_NOT_FOUND, f"The Connection with connection_id: `{connection_id}` was not found" + ) return ConnectionResponse.model_validate(connection, from_attributes=True) @connections_router.get( "/", - responses=create_openapi_http_exception_doc([401, 403, 404]), + responses=create_openapi_http_exception_doc( + [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN, status.HTTP_404_NOT_FOUND] + ), ) async def get_connections( limit: QueryLimit, diff --git 
a/airflow/api_fastapi/core_api/routes/public/dag_run.py b/airflow/api_fastapi/core_api/routes/public/dag_run.py index 02780d6088e9..7f41573b1dba 100644 --- a/airflow/api_fastapi/core_api/routes/public/dag_run.py +++ b/airflow/api_fastapi/core_api/routes/public/dag_run.py @@ -17,7 +17,7 @@ from __future__ import annotations -from fastapi import Depends, HTTPException, Query, Request +from fastapi import Depends, HTTPException, Query, Request, status from sqlalchemy import select from sqlalchemy.orm import Session from typing_extensions import Annotated @@ -40,21 +40,40 @@ dag_run_router = AirflowRouter(tags=["DagRun"], prefix="/dags/{dag_id}/dagRuns") -@dag_run_router.get("/{dag_run_id}", responses=create_openapi_http_exception_doc([401, 403, 404])) +@dag_run_router.get( + "/{dag_run_id}", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_401_UNAUTHORIZED, + status.HTTP_403_FORBIDDEN, + status.HTTP_404_NOT_FOUND, + ] + ), +) async def get_dag_run( dag_id: str, dag_run_id: str, session: Annotated[Session, Depends(get_session)] ) -> DAGRunResponse: dag_run = session.scalar(select(DagRun).filter_by(dag_id=dag_id, run_id=dag_run_id)) if dag_run is None: raise HTTPException( - 404, f"The DagRun with dag_id: `{dag_id}` and run_id: `{dag_run_id}` was not found" + status.HTTP_404_NOT_FOUND, + f"The DagRun with dag_id: `{dag_id}` and run_id: `{dag_run_id}` was not found", ) return DAGRunResponse.model_validate(dag_run, from_attributes=True) @dag_run_router.delete( - "/{dag_run_id}", status_code=204, responses=create_openapi_http_exception_doc([400, 401, 403, 404]) + "/{dag_run_id}", + status_code=status.HTTP_204_NO_CONTENT, + responses=create_openapi_http_exception_doc( + [ + status.HTTP_400_BAD_REQUEST, + status.HTTP_401_UNAUTHORIZED, + status.HTTP_403_FORBIDDEN, + status.HTTP_404_NOT_FOUND, + ] + ), ) async def delete_dag_run(dag_id: str, dag_run_id: str, session: Annotated[Session, Depends(get_session)]): """Delete a DAG Run entry.""" @@ -62,13 +81,24 @@ async def delete_dag_run(dag_id: str, dag_run_id: str, session: Annotated[Sessio if dag_run is None: raise HTTPException( - 404, f"The DagRun with dag_id: `{dag_id}` and run_id: `{dag_run_id}` was not found" + status.HTTP_404_NOT_FOUND, + f"The DagRun with dag_id: `{dag_id}` and run_id: `{dag_run_id}` was not found", ) session.delete(dag_run) -@dag_run_router.patch("/{dag_run_id}", responses=create_openapi_http_exception_doc([400, 401, 403, 404])) +@dag_run_router.patch( + "/{dag_run_id}", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_400_BAD_REQUEST, + status.HTTP_401_UNAUTHORIZED, + status.HTTP_403_FORBIDDEN, + status.HTTP_404_NOT_FOUND, + ] + ), +) async def patch_dag_run_state( dag_id: str, dag_run_id: str, @@ -81,17 +111,20 @@ async def patch_dag_run_state( dag_run = session.scalar(select(DagRun).filter_by(dag_id=dag_id, run_id=dag_run_id)) if dag_run is None: raise HTTPException( - 404, f"The DagRun with dag_id: `{dag_id}` and run_id: `{dag_run_id}` was not found" + status.HTTP_404_NOT_FOUND, + f"The DagRun with dag_id: `{dag_id}` and run_id: `{dag_run_id}` was not found", ) dag: DAG = request.app.state.dag_bag.get_dag(dag_id) if not dag: - raise HTTPException(404, f"Dag with id {dag_id} was not found") + raise HTTPException(status.HTTP_404_NOT_FOUND, f"Dag with id {dag_id} was not found") if update_mask: if update_mask != ["state"]: - raise HTTPException(400, "Only `state` field can be updated through the REST API") + raise HTTPException( + status.HTTP_400_BAD_REQUEST, "Only `state` field can be updated 
through the REST API" + ) else: update_mask = ["state"] diff --git a/airflow/api_fastapi/core_api/routes/public/dag_sources.py b/airflow/api_fastapi/core_api/routes/public/dag_sources.py index 2a660a7d0264..3cf046f5b757 100644 --- a/airflow/api_fastapi/core_api/routes/public/dag_sources.py +++ b/airflow/api_fastapi/core_api/routes/public/dag_sources.py @@ -16,7 +16,7 @@ # under the License. from __future__ import annotations -from fastapi import Depends, Header, HTTPException, Request, Response +from fastapi import Depends, Header, HTTPException, Request, Response, status from itsdangerous import BadSignature, URLSafeSerializer from sqlalchemy.orm import Session from typing_extensions import Annotated @@ -37,7 +37,15 @@ @dag_sources_router.get( "/{file_token}", responses={ - **create_openapi_http_exception_doc([400, 401, 403, 404, 406]), + **create_openapi_http_exception_doc( + [ + status.HTTP_400_BAD_REQUEST, + status.HTTP_401_UNAUTHORIZED, + status.HTTP_403_FORBIDDEN, + status.HTTP_404_NOT_FOUND, + status.HTTP_406_NOT_ACCEPTABLE, + ] + ), "200": { "description": "Successful Response", "content": { @@ -62,10 +70,10 @@ async def get_dag_source( content=DagCode.code(path, session=session), ) except (BadSignature, FileNotFoundError): - raise HTTPException(404, "DAG source not found") + raise HTTPException(status.HTTP_404_NOT_FOUND, "DAG source not found") if accept.startswith(mime_type_text): return Response(dag_source_model.content, media_type=mime_type_text) if accept.startswith(mime_type_json) or accept.startswith(mime_type_any): return dag_source_model - raise HTTPException(406, "Content not available for Accept header") + raise HTTPException(status.HTTP_406_NOT_ACCEPTABLE, "Content not available for Accept header") diff --git a/airflow/api_fastapi/core_api/routes/public/dag_warning.py b/airflow/api_fastapi/core_api/routes/public/dag_warning.py index a388fae13be1..f445fb0afcf5 100644 --- a/airflow/api_fastapi/core_api/routes/public/dag_warning.py +++ b/airflow/api_fastapi/core_api/routes/public/dag_warning.py @@ -17,7 +17,7 @@ from __future__ import annotations -from fastapi import Depends +from fastapi import Depends, status from sqlalchemy import select from sqlalchemy.orm import Session from typing_extensions import Annotated @@ -44,7 +44,10 @@ dag_warning_router = AirflowRouter(tags=["DagWarning"]) -@dag_warning_router.get("/dagWarnings", responses=create_openapi_http_exception_doc([401, 403])) +@dag_warning_router.get( + "/dagWarnings", + responses=create_openapi_http_exception_doc([status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]), +) async def list_dag_warnings( dag_id: QueryDagIdInDagWarningFilter, warning_type: QueryWarningTypeFilter, diff --git a/airflow/api_fastapi/core_api/routes/public/dags.py b/airflow/api_fastapi/core_api/routes/public/dags.py index c7b753b5cdbd..7d2ee68fa144 100644 --- a/airflow/api_fastapi/core_api/routes/public/dags.py +++ b/airflow/api_fastapi/core_api/routes/public/dags.py @@ -17,7 +17,7 @@ from __future__ import annotations -from fastapi import Depends, HTTPException, Query, Request, Response +from fastapi import Depends, HTTPException, Query, Request, Response, status from sqlalchemy import select, update from sqlalchemy.orm import Session from typing_extensions import Annotated @@ -99,7 +99,7 @@ async def get_dags( @dags_router.get( "/tags", - responses=create_openapi_http_exception_doc([401, 403]), + responses=create_openapi_http_exception_doc([status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]), ) async def get_dag_tags( limit: 
QueryLimit, @@ -130,18 +130,29 @@ async def get_dag_tags( return DAGTagCollectionResponse(tags=[dag_tag for dag_tag in dag_tags], total_entries=total_entries) -@dags_router.get("/{dag_id}", responses=create_openapi_http_exception_doc([400, 401, 403, 404, 422])) +@dags_router.get( + "/{dag_id}", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_400_BAD_REQUEST, + status.HTTP_401_UNAUTHORIZED, + status.HTTP_403_FORBIDDEN, + status.HTTP_404_NOT_FOUND, + status.HTTP_422_UNPROCESSABLE_ENTITY, + ] + ), +) async def get_dag( dag_id: str, session: Annotated[Session, Depends(get_session)], request: Request ) -> DAGResponse: """Get basic information about a DAG.""" dag: DAG = request.app.state.dag_bag.get_dag(dag_id) if not dag: - raise HTTPException(404, f"Dag with id {dag_id} was not found") + raise HTTPException(status.HTTP_404_NOT_FOUND, f"Dag with id {dag_id} was not found") dag_model: DagModel = session.get(DagModel, dag_id) if not dag_model: - raise HTTPException(404, f"Unable to obtain dag with id {dag_id} from session") + raise HTTPException(status.HTTP_404_NOT_FOUND, f"Unable to obtain dag with id {dag_id} from session") for key, value in dag.__dict__.items(): if not key.startswith("_") and not hasattr(dag_model, key): @@ -157,11 +168,11 @@ async def get_dag_details( """Get details of DAG.""" dag: DAG = request.app.state.dag_bag.get_dag(dag_id) if not dag: - raise HTTPException(404, f"Dag with id {dag_id} was not found") + raise HTTPException(status.HTTP_404_NOT_FOUND, f"Dag with id {dag_id} was not found") dag_model: DagModel = session.get(DagModel, dag_id) if not dag_model: - raise HTTPException(404, f"Unable to obtain dag with id {dag_id} from session") + raise HTTPException(status.HTTP_404_NOT_FOUND, f"Unable to obtain dag with id {dag_id} from session") for key, value in dag.__dict__.items(): if not key.startswith("_") and not hasattr(dag_model, key): @@ -170,7 +181,17 @@ async def get_dag_details( return DAGDetailsResponse.model_validate(dag_model, from_attributes=True) -@dags_router.patch("/{dag_id}", responses=create_openapi_http_exception_doc([400, 401, 403, 404])) +@dags_router.patch( + "/{dag_id}", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_400_BAD_REQUEST, + status.HTTP_401_UNAUTHORIZED, + status.HTTP_403_FORBIDDEN, + status.HTTP_404_NOT_FOUND, + ] + ), +) async def patch_dag( dag_id: str, patch_body: DAGPatchBody, @@ -181,11 +202,13 @@ async def patch_dag( dag = session.get(DagModel, dag_id) if dag is None: - raise HTTPException(404, f"Dag with id: {dag_id} was not found") + raise HTTPException(status.HTTP_404_NOT_FOUND, f"Dag with id: {dag_id} was not found") if update_mask: if update_mask != ["is_paused"]: - raise HTTPException(400, "Only `is_paused` field can be updated through the REST API") + raise HTTPException( + status.HTTP_400_BAD_REQUEST, "Only `is_paused` field can be updated through the REST API" + ) else: update_mask = ["is_paused"] @@ -197,7 +220,17 @@ async def patch_dag( return DAGResponse.model_validate(dag, from_attributes=True) -@dags_router.patch("/", responses=create_openapi_http_exception_doc([400, 401, 403, 404])) +@dags_router.patch( + "/", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_400_BAD_REQUEST, + status.HTTP_401_UNAUTHORIZED, + status.HTTP_403_FORBIDDEN, + status.HTTP_404_NOT_FOUND, + ] + ), +) async def patch_dags( patch_body: DAGPatchBody, limit: QueryLimit, @@ -214,7 +247,9 @@ async def patch_dags( """Patch multiple DAGs.""" if update_mask: if update_mask != ["is_paused"]: - raise 
HTTPException(400, "Only `is_paused` field can be updated through the REST API") + raise HTTPException( + status.HTTP_400_BAD_REQUEST, "Only `is_paused` field can be updated through the REST API" + ) else: update_mask = ["is_paused"] @@ -244,7 +279,18 @@ async def patch_dags( ) -@dags_router.delete("/{dag_id}", responses=create_openapi_http_exception_doc([400, 401, 403, 404, 422])) +@dags_router.delete( + "/{dag_id}", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_400_BAD_REQUEST, + status.HTTP_401_UNAUTHORIZED, + status.HTTP_403_FORBIDDEN, + status.HTTP_404_NOT_FOUND, + status.HTTP_422_UNPROCESSABLE_ENTITY, + ] + ), +) async def delete_dag( dag_id: str, session: Annotated[Session, Depends(get_session)], @@ -253,7 +299,9 @@ async def delete_dag( try: delete_dag_module.delete_dag(dag_id, session=session) except DagNotFound: - raise HTTPException(404, f"Dag with id: {dag_id} was not found") + raise HTTPException(status.HTTP_404_NOT_FOUND, f"Dag with id: {dag_id} was not found") except AirflowException: - raise HTTPException(409, f"Task instances of dag with id: '{dag_id}' are still running") - return Response(status_code=204) + raise HTTPException( + status.HTTP_409_CONFLICT, f"Task instances of dag with id: '{dag_id}' are still running" + ) + return Response(status_code=status.HTTP_204_NO_CONTENT) diff --git a/airflow/api_fastapi/core_api/routes/public/event_logs.py b/airflow/api_fastapi/core_api/routes/public/event_logs.py index 75f12cbefb03..537bb5ffe4df 100644 --- a/airflow/api_fastapi/core_api/routes/public/event_logs.py +++ b/airflow/api_fastapi/core_api/routes/public/event_logs.py @@ -16,7 +16,7 @@ # under the License. from __future__ import annotations -from fastapi import Depends, HTTPException +from fastapi import Depends, HTTPException, status from sqlalchemy import select from sqlalchemy.orm import Session from typing_extensions import Annotated @@ -36,7 +36,9 @@ @event_logs_router.get( "/{event_log_id}", - responses=create_openapi_http_exception_doc([401, 403, 404]), + responses=create_openapi_http_exception_doc( + [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN, status.HTTP_404_NOT_FOUND] + ), ) async def get_event_log( event_log_id: int, @@ -44,7 +46,7 @@ async def get_event_log( ) -> EventLogResponse: event_log = session.scalar(select(Log).where(Log.id == event_log_id)) if event_log is None: - raise HTTPException(404, f"The Event Log with id: `{event_log_id}` not found") + raise HTTPException(status.HTTP_404_NOT_FOUND, f"The Event Log with id: `{event_log_id}` not found") return EventLogResponse.model_validate( event_log, from_attributes=True, diff --git a/airflow/api_fastapi/core_api/routes/public/pools.py b/airflow/api_fastapi/core_api/routes/public/pools.py index 5690196e850a..99389e0bd6e9 100644 --- a/airflow/api_fastapi/core_api/routes/public/pools.py +++ b/airflow/api_fastapi/core_api/routes/public/pools.py @@ -16,7 +16,7 @@ # under the License. 
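As an aside (not part of the patch itself): the hunks above and below all apply one refactor: bare integer response codes are replaced by the named constants that FastAPI re-exports as `fastapi.status`, both inside `create_openapi_http_exception_doc(...)` and in raised `HTTPException`s. A minimal sketch of the same pattern with plain FastAPI follows; the route and the in-memory data are invented purely for illustration and are not Airflow APIs.

from fastapi import FastAPI, HTTPException, status

app = FastAPI()
_POOLS = {"default_pool": 128}  # invented stand-in data


@app.get(
    "/pools/{pool_name}",
    responses={status.HTTP_404_NOT_FOUND: {"description": "Pool not found"}},
)
async def get_pool(pool_name: str) -> dict[str, int]:
    if pool_name not in _POOLS:
        # status.HTTP_404_NOT_FOUND is simply the integer 404 under a self-documenting name.
        raise HTTPException(status.HTTP_404_NOT_FOUND, f"The Pool with name: `{pool_name}` was not found")
    return {pool_name: _POOLS[pool_name]}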
from __future__ import annotations -from fastapi import Depends, HTTPException, Query +from fastapi import Depends, HTTPException, Query, status from fastapi.exceptions import RequestValidationError from pydantic import ValidationError from sqlalchemy import delete, select @@ -41,8 +41,15 @@ @pools_router.delete( "/{pool_name}", - status_code=204, - responses=create_openapi_http_exception_doc([400, 401, 403, 404]), + status_code=status.HTTP_204_NO_CONTENT, + responses=create_openapi_http_exception_doc( + [ + status.HTTP_400_BAD_REQUEST, + status.HTTP_401_UNAUTHORIZED, + status.HTTP_403_FORBIDDEN, + status.HTTP_404_NOT_FOUND, + ] + ), ) async def delete_pool( pool_name: str, @@ -50,17 +57,19 @@ async def delete_pool( ): """Delete a pool entry.""" if pool_name == "default_pool": - raise HTTPException(400, "Default Pool can't be deleted") + raise HTTPException(status.HTTP_400_BAD_REQUEST, "Default Pool can't be deleted") affected_count = session.execute(delete(Pool).where(Pool.pool == pool_name)).rowcount if affected_count == 0: - raise HTTPException(404, f"The Pool with name: `{pool_name}` was not found") + raise HTTPException(status.HTTP_404_NOT_FOUND, f"The Pool with name: `{pool_name}` was not found") @pools_router.get( "/{pool_name}", - responses=create_openapi_http_exception_doc([401, 403, 404]), + responses=create_openapi_http_exception_doc( + [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN, status.HTTP_404_NOT_FOUND] + ), ) async def get_pool( pool_name: str, @@ -69,14 +78,16 @@ async def get_pool( """Get a pool.""" pool = session.scalar(select(Pool).where(Pool.pool == pool_name)) if pool is None: - raise HTTPException(404, f"The Pool with name: `{pool_name}` was not found") + raise HTTPException(status.HTTP_404_NOT_FOUND, f"The Pool with name: `{pool_name}` was not found") return PoolResponse.model_validate(pool, from_attributes=True) @pools_router.get( "/", - responses=create_openapi_http_exception_doc([401, 403, 404]), + responses=create_openapi_http_exception_doc( + [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN, status.HTTP_404_NOT_FOUND] + ), ) async def get_pools( limit: QueryLimit, @@ -105,7 +116,17 @@ async def get_pools( ) -@pools_router.patch("/{pool_name}", responses=create_openapi_http_exception_doc([400, 401, 403, 404])) +@pools_router.patch( + "/{pool_name}", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_400_BAD_REQUEST, + status.HTTP_401_UNAUTHORIZED, + status.HTTP_403_FORBIDDEN, + status.HTTP_404_NOT_FOUND, + ] + ), +) async def patch_pool( pool_name: str, patch_body: PoolPatchBody, @@ -118,11 +139,16 @@ async def patch_pool( if update_mask and all(mask.strip() in {"slots", "include_deferred"} for mask in update_mask): pass else: - raise HTTPException(400, "Only slots and included_deferred can be modified on Default Pool") + raise HTTPException( + status.HTTP_400_BAD_REQUEST, + "Only slots and included_deferred can be modified on Default Pool", + ) pool = session.scalar(select(Pool).where(Pool.pool == pool_name).limit(1)) if not pool: - raise HTTPException(404, detail=f"The Pool with name: `{pool_name}` was not found") + raise HTTPException( + status.HTTP_404_NOT_FOUND, detail=f"The Pool with name: `{pool_name}` was not found" + ) if update_mask: data = patch_body.model_dump(include=set(update_mask), by_alias=True) @@ -139,7 +165,11 @@ async def patch_pool( return PoolResponse.model_validate(pool, from_attributes=True) -@pools_router.post("/", status_code=201, responses=create_openapi_http_exception_doc([401, 403])) 
+@pools_router.post( + "/", + status_code=status.HTTP_201_CREATED, + responses=create_openapi_http_exception_doc([status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]), +) async def post_pool( post_body: PoolPostBody, session: Annotated[Session, Depends(get_session)], diff --git a/airflow/api_fastapi/core_api/routes/public/task_instances.py b/airflow/api_fastapi/core_api/routes/public/task_instances.py index c9458e843afe..df16c0bc450d 100644 --- a/airflow/api_fastapi/core_api/routes/public/task_instances.py +++ b/airflow/api_fastapi/core_api/routes/public/task_instances.py @@ -17,7 +17,7 @@ from __future__ import annotations -from fastapi import Depends, HTTPException +from fastapi import Depends, HTTPException, status from sqlalchemy.orm import Session, joinedload from sqlalchemy.sql import select from typing_extensions import Annotated @@ -33,7 +33,12 @@ ) -@task_instances_router.get("/{task_id}", responses=create_openapi_http_exception_doc([401, 403, 404])) +@task_instances_router.get( + "/{task_id}", + responses=create_openapi_http_exception_doc( + [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN, status.HTTP_404_NOT_FOUND] + ), +) async def get_task_instance( dag_id: str, dag_run_id: str, task_id: str, session: Annotated[Session, Depends(get_session)] ) -> TaskInstanceResponse: @@ -48,17 +53,22 @@ async def get_task_instance( if task_instance is None: raise HTTPException( - 404, + status.HTTP_404_NOT_FOUND, f"The Task Instance with dag_id: `{dag_id}`, run_id: `{dag_run_id}` and task_id: `{task_id}` was not found", ) if task_instance.map_index != -1: - raise HTTPException(404, "Task instance is mapped, add the map_index value to the URL") + raise HTTPException( + status.HTTP_404_NOT_FOUND, "Task instance is mapped, add the map_index value to the URL" + ) return TaskInstanceResponse.model_validate(task_instance, from_attributes=True) @task_instances_router.get( - "/{task_id}/{map_index}", responses=create_openapi_http_exception_doc([401, 403, 404]) + "/{task_id}/{map_index}", + responses=create_openapi_http_exception_doc( + [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN, status.HTTP_404_NOT_FOUND] + ), ) async def get_mapped_task_instance( dag_id: str, @@ -78,7 +88,7 @@ async def get_mapped_task_instance( if task_instance is None: raise HTTPException( - 404, + status.HTTP_404_NOT_FOUND, f"The Mapped Task Instance with dag_id: `{dag_id}`, run_id: `{dag_run_id}`, task_id: `{task_id}`, and map_index: `{map_index}` was not found", ) diff --git a/airflow/api_fastapi/core_api/routes/public/variables.py b/airflow/api_fastapi/core_api/routes/public/variables.py index 6b834a6de758..5d2bf5a899d8 100644 --- a/airflow/api_fastapi/core_api/routes/public/variables.py +++ b/airflow/api_fastapi/core_api/routes/public/variables.py @@ -16,7 +16,7 @@ # under the License. 
from __future__ import annotations -from fastapi import Depends, HTTPException, Query +from fastapi import Depends, HTTPException, Query, status from sqlalchemy import select from sqlalchemy.orm import Session from typing_extensions import Annotated @@ -38,7 +38,9 @@ @variables_router.delete( "/{variable_key}", status_code=204, - responses=create_openapi_http_exception_doc([401, 403, 404]), + responses=create_openapi_http_exception_doc( + [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN, status.HTTP_404_NOT_FOUND] + ), ) async def delete_variable( variable_key: str, @@ -46,10 +48,17 @@ async def delete_variable( ): """Delete a variable entry.""" if Variable.delete(variable_key, session) == 0: - raise HTTPException(404, f"The Variable with key: `{variable_key}` was not found") + raise HTTPException( + status.HTTP_404_NOT_FOUND, f"The Variable with key: `{variable_key}` was not found" + ) -@variables_router.get("/{variable_key}", responses=create_openapi_http_exception_doc([401, 403, 404])) +@variables_router.get( + "/{variable_key}", + responses=create_openapi_http_exception_doc( + [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN, status.HTTP_404_NOT_FOUND] + ), +) async def get_variable( variable_key: str, session: Annotated[Session, Depends(get_session)], @@ -58,14 +67,16 @@ async def get_variable( variable = session.scalar(select(Variable).where(Variable.key == variable_key).limit(1)) if variable is None: - raise HTTPException(404, f"The Variable with key: `{variable_key}` was not found") + raise HTTPException( + status.HTTP_404_NOT_FOUND, f"The Variable with key: `{variable_key}` was not found" + ) return VariableResponse.model_validate(variable, from_attributes=True) @variables_router.get( "/", - responses=create_openapi_http_exception_doc([401, 403]), + responses=create_openapi_http_exception_doc([status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]), ) async def get_variables( limit: QueryLimit, @@ -99,7 +110,17 @@ async def get_variables( ) -@variables_router.patch("/{variable_key}", responses=create_openapi_http_exception_doc([400, 401, 403, 404])) +@variables_router.patch( + "/{variable_key}", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_400_BAD_REQUEST, + status.HTTP_401_UNAUTHORIZED, + status.HTTP_403_FORBIDDEN, + status.HTTP_404_NOT_FOUND, + ] + ), +) async def patch_variable( variable_key: str, patch_body: VariableBody, @@ -108,11 +129,15 @@ async def patch_variable( ) -> VariableResponse: """Update a variable by key.""" if patch_body.key != variable_key: - raise HTTPException(400, "Invalid body, key from request body doesn't match uri parameter") + raise HTTPException( + status.HTTP_400_BAD_REQUEST, "Invalid body, key from request body doesn't match uri parameter" + ) non_update_fields = {"key"} variable = session.scalar(select(Variable).filter_by(key=variable_key).limit(1)) if not variable: - raise HTTPException(404, f"The Variable with key: `{variable_key}` was not found") + raise HTTPException( + status.HTTP_404_NOT_FOUND, f"The Variable with key: `{variable_key}` was not found" + ) if update_mask: data = patch_body.model_dump(include=set(update_mask) - non_update_fields) else: @@ -122,7 +147,11 @@ async def patch_variable( return variable -@variables_router.post("/", status_code=201, responses=create_openapi_http_exception_doc([401, 403])) +@variables_router.post( + "/", + status_code=status.HTTP_201_CREATED, + responses=create_openapi_http_exception_doc([status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]), +) async def 
post_variable( post_body: VariableBody, session: Annotated[Session, Depends(get_session)], diff --git a/airflow/api_fastapi/core_api/routes/ui/assets.py b/airflow/api_fastapi/core_api/routes/ui/assets.py index 6786bc30ae68..3b98e4f59a3c 100644 --- a/airflow/api_fastapi/core_api/routes/ui/assets.py +++ b/airflow/api_fastapi/core_api/routes/ui/assets.py @@ -17,7 +17,7 @@ from __future__ import annotations -from fastapi import Depends, HTTPException, Request +from fastapi import Depends, HTTPException, Request, status from sqlalchemy import and_, func, select from sqlalchemy.orm import Session from typing_extensions import Annotated @@ -39,12 +39,12 @@ async def next_run_assets( dag = request.app.state.dag_bag.get_dag(dag_id) if not dag: - raise HTTPException(404, f"can't find dag {dag_id}") + raise HTTPException(status.HTTP_404_NOT_FOUND, f"can't find dag {dag_id}") dag_model = DagModel.get_dagmodel(dag_id, session=session) if dag_model is None: - raise HTTPException(404, f"can't find associated dag_model {dag_id}") + raise HTTPException(status.HTTP_404_NOT_FOUND, f"can't find associated dag_model {dag_id}") latest_run = dag_model.get_last_dagrun(session=session) From 5ccfe5c4c7093984e56c79e3ac02f5677ff39af4 Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Mon, 4 Nov 2024 23:24:07 +0000 Subject: [PATCH 027/137] Bump `ruff` to `0.7.2` (#43668) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.7.1 → v0.7.2](https://github.com/astral-sh/ruff-pre-commit/compare/v0.7.1...v0.7.2) --- .pre-commit-config.yaml | 4 ++-- hatch_build.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 20c7a293bff0..9fbfdc9033dc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -360,7 +360,7 @@ repos: types_or: [python, pyi] args: [--fix] require_serial: true - additional_dependencies: ["ruff==0.7.1"] + additional_dependencies: ["ruff==0.7.2"] exclude: ^.*/.*_vendor/|^tests/dags/test_imports.py|^performance/tests/test_.*.py - id: ruff-format name: Run 'ruff format' @@ -370,7 +370,7 @@ repos: types_or: [python, pyi] args: [] require_serial: true - additional_dependencies: ["ruff==0.7.1"] + additional_dependencies: ["ruff==0.7.2"] exclude: ^.*/.*_vendor/|^tests/dags/test_imports.py$ - id: replace-bad-characters name: Replace bad characters diff --git a/hatch_build.py b/hatch_build.py index 00832672b081..91b9256b4d03 100644 --- a/hatch_build.py +++ b/hatch_build.py @@ -246,7 +246,7 @@ "devel-static-checks": [ "black>=23.12.0", "pre-commit>=3.5.0", - "ruff==0.7.1", + "ruff==0.7.2", "yamllint>=1.33.0", ], "devel-tests": [ From 5f9fdf016da631cd4eb66499b54796d8c2dbbfbd Mon Sep 17 00:00:00 2001 From: GPK Date: Tue, 5 Nov 2024 06:29:29 +0000 Subject: [PATCH 028/137] add config section to standard provider docs index (#43674) --- .../configurations-ref.rst | 18 ++++++++++++++++++ .../index.rst | 1 + 2 files changed, 19 insertions(+) create mode 100644 docs/apache-airflow-providers-standard/configurations-ref.rst diff --git a/docs/apache-airflow-providers-standard/configurations-ref.rst b/docs/apache-airflow-providers-standard/configurations-ref.rst new file mode 100644 index 000000000000..5885c9d91b6e --- /dev/null +++ b/docs/apache-airflow-providers-standard/configurations-ref.rst @@ -0,0 +1,18 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. 
See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +.. include:: ../exts/includes/providers-configurations-ref.rst diff --git a/docs/apache-airflow-providers-standard/index.rst b/docs/apache-airflow-providers-standard/index.rst index eb60662df9d8..00461bc1eb36 100644 --- a/docs/apache-airflow-providers-standard/index.rst +++ b/docs/apache-airflow-providers-standard/index.rst @@ -36,6 +36,7 @@ Operators Sensors + Configuration .. toctree:: :hidden: From 90b98478cc5a3c81e2beab57cd222061d53db87d Mon Sep 17 00:00:00 2001 From: Niko Oliveira Date: Mon, 4 Nov 2024 23:32:04 -0800 Subject: [PATCH 029/137] Fix System test type in breeze (#43670) * Fix System test type in breeze Adjust the behaviour of the `System` test type in Breeze testing tests. Remove the path appending to the beginning of the breeze command (because as we've discussed before with the reorganization of our test directories this creates a non-top level loading of a pytest plugin which pytest disallows). This allow us to still specify the System test type because that option controls other beahviours we need (like disabling db init). * Fix unit test --- dev/breeze/src/airflow_breeze/utils/run_tests.py | 3 +-- dev/breeze/tests/test_pytest_args_for_test_types.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/dev/breeze/src/airflow_breeze/utils/run_tests.py b/dev/breeze/src/airflow_breeze/utils/run_tests.py index 6379eda9e759..9e9ca2d2660b 100644 --- a/dev/breeze/src/airflow_breeze/utils/run_tests.py +++ b/dev/breeze/src/airflow_breeze/utils/run_tests.py @@ -170,7 +170,6 @@ def get_excluded_provider_args(python_version: str) -> list[str]: "Serialization": [ "tests/serialization", ], - "System": ["tests/system"], "TaskSDK": ["task_sdk/tests"], "WWW": [ "tests/www", @@ -255,7 +254,7 @@ def convert_test_type_to_pytest_args( else: return [INTEGRATION_TESTS] if test_type == "System": - return [SYSTEM_TESTS] + return [] if skip_provider_tests and test_type.startswith("Providers"): return [] if test_type.startswith(PROVIDERS_LIST_EXCLUDE_PREFIX): diff --git a/dev/breeze/tests/test_pytest_args_for_test_types.py b/dev/breeze/tests/test_pytest_args_for_test_types.py index 94a229802ad3..d4e61b08c604 100644 --- a/dev/breeze/tests/test_pytest_args_for_test_types.py +++ b/dev/breeze/tests/test_pytest_args_for_test_types.py @@ -63,7 +63,7 @@ ), ( "System", - ["tests/system"], + [], False, ), ( From b9b06140491d55878954b1a490c76ce7593b6357 Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Tue, 5 Nov 2024 09:06:04 +0000 Subject: [PATCH 030/137] AIP-72: Remove DAG pickling (#43667) This was a less used part of Airflow and does not make sense to keep it since we are removing DB access as part of AIP-72, I am removing it here. This was missed in Airflow 2.0! 
* Handle executors in the providers for Airflow <3 support --- airflow/api_connexion/openapi/v1.yaml | 9 - airflow/api_connexion/schemas/dag_schema.py | 1 - .../core_api/openapi/v1-generated.yaml | 42 - .../api_fastapi/core_api/serializers/dags.py | 2 - airflow/cli/cli_config.py | 18 - airflow/cli/commands/dag_command.py | 1 - airflow/cli/commands/dag_processor_command.py | 1 - airflow/cli/commands/scheduler_command.py | 4 +- airflow/cli/commands/task_command.py | 27 +- airflow/dag_processing/manager.py | 13 +- airflow/dag_processing/processor.py | 32 +- airflow/executors/base_executor.py | 3 - airflow/executors/debug_executor.py | 1 - airflow/executors/local_executor.py | 1 - airflow/executors/sequential_executor.py | 2 - airflow/jobs/local_task_job_runner.py | 2 - airflow/jobs/scheduler_job_runner.py | 10 - .../versions/0046_3_0_0_drop_dag_pickling.py | 66 + airflow/models/__init__.py | 3 - airflow/models/dag.py | 44 - airflow/models/dagpickle.py | 56 - airflow/models/taskinstance.py | 20 +- airflow/serialization/pydantic/dag.py | 2 - .../serialization/pydantic/taskinstance.py | 2 - airflow/serialization/serialized_objects.py | 4 - airflow/task/standard_task_runner.py | 1 - .../ui/openapi-gen/requests/schemas.gen.ts | 78 - airflow/ui/openapi-gen/requests/types.gen.ts | 6 - .../ui/src/pages/DagsList/DagCard.test.tsx | 2 - airflow/utils/cli.py | 16 - airflow/utils/db.py | 2 +- airflow/www/static/js/types/api-generated.ts | 7 - dev/perf/scheduler_dag_execution_timing.py | 4 +- dev/perf/sql_queries.py | 2 +- docs/apache-airflow/img/airflow_erd.sha256 | 2 +- docs/apache-airflow/img/airflow_erd.svg | 3492 ++++++++--------- docs/apache-airflow/migrations-ref.rst | 4 +- newsfragments/aip-72.significant.rst | 4 + .../executors/celery_kubernetes_executor.py | 10 +- .../executors/local_kubernetes_executor.py | 9 +- .../celery/executors/test_celery_executor.py | 3 - .../executors/test_kubernetes_executor.py | 3 - .../endpoints/test_dag_endpoint.py | 32 - .../api_connexion/schemas/test_dag_schema.py | 3 - .../core_api/routes/public/test_dags.py | 4 - tests/cli/commands/test_task_command.py | 30 - tests/dag_processing/test_job_runner.py | 88 +- tests/dag_processing/test_processor.py | 11 +- tests/executors/test_base_executor.py | 4 - tests/executors/test_local_executor.py | 3 - tests/executors/test_sequential_executor.py | 3 - .../test_dag_import_error_listener.py | 2 +- tests/models/test_dag.py | 7 - tests/utils/test_cli_util.py | 18 +- tests/utils/test_db_cleanup.py | 1 - tests/www/views/test_views_home.py | 2 +- 56 files changed, 1867 insertions(+), 2352 deletions(-) create mode 100644 airflow/migrations/versions/0046_3_0_0_drop_dag_pickling.py delete mode 100644 airflow/models/dagpickle.py diff --git a/airflow/api_connexion/openapi/v1.yaml b/airflow/api_connexion/openapi/v1.yaml index c884c1595411..41d469cd207e 100644 --- a/airflow/api_connexion/openapi/v1.yaml +++ b/airflow/api_connexion/openapi/v1.yaml @@ -2911,15 +2911,6 @@ components: description: | The last time the DAG was parsed. - *New in version 2.3.0* - last_pickled: - type: string - format: date-time - readOnly: true - nullable: true - description: | - The last time the DAG was pickled. 
- *New in version 2.3.0* last_expired: type: string diff --git a/airflow/api_connexion/schemas/dag_schema.py b/airflow/api_connexion/schemas/dag_schema.py index f22812abd111..9f75f4dad52f 100644 --- a/airflow/api_connexion/schemas/dag_schema.py +++ b/airflow/api_connexion/schemas/dag_schema.py @@ -54,7 +54,6 @@ class Meta: is_paused = auto_field() is_active = auto_field(dump_only=True) last_parsed_time = auto_field(dump_only=True) - last_pickled = auto_field(dump_only=True) last_expired = auto_field(dump_only=True) default_view = auto_field(dump_only=True) fileloc = auto_field(dump_only=True) diff --git a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml index abd7f0baf5ed..3dcff4b2d066 100644 --- a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml @@ -2370,24 +2370,12 @@ components: format: date-time - type: 'null' title: Last Parsed Time - last_pickled: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Last Pickled last_expired: anyOf: - type: string format: date-time - type: 'null' title: Last Expired - pickle_id: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Pickle Id default_view: anyOf: - type: string @@ -2541,9 +2529,7 @@ components: - is_paused - is_active - last_parsed_time - - last_pickled - last_expired - - pickle_id - default_view - fileloc - description @@ -2606,24 +2592,12 @@ components: format: date-time - type: 'null' title: Last Parsed Time - last_pickled: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Last Pickled last_expired: anyOf: - type: string format: date-time - type: 'null' title: Last Expired - pickle_id: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Pickle Id default_view: anyOf: - type: string @@ -2710,9 +2684,7 @@ components: - is_paused - is_active - last_parsed_time - - last_pickled - last_expired - - pickle_id - default_view - fileloc - description @@ -2976,24 +2948,12 @@ components: format: date-time - type: 'null' title: Last Parsed Time - last_pickled: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Last Pickled last_expired: anyOf: - type: string format: date-time - type: 'null' title: Last Expired - pickle_id: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Pickle Id default_view: anyOf: - type: string @@ -3085,9 +3045,7 @@ components: - is_paused - is_active - last_parsed_time - - last_pickled - last_expired - - pickle_id - default_view - fileloc - description diff --git a/airflow/api_fastapi/core_api/serializers/dags.py b/airflow/api_fastapi/core_api/serializers/dags.py index 6e2c3933e176..27cc3ad47356 100644 --- a/airflow/api_fastapi/core_api/serializers/dags.py +++ b/airflow/api_fastapi/core_api/serializers/dags.py @@ -43,9 +43,7 @@ class DAGResponse(BaseModel): is_paused: bool is_active: bool last_parsed_time: datetime | None - last_pickled: datetime | None last_expired: datetime | None - pickle_id: datetime | None default_view: str | None fileloc: str description: str | None diff --git a/airflow/cli/cli_config.py b/airflow/cli/cli_config.py index 06ac2f7bd817..e93d5e25c631 100644 --- a/airflow/cli/cli_config.py +++ b/airflow/cli/cli_config.py @@ -576,10 +576,6 @@ def string_lower_type(val): choices={"check", "ignore", "wait"}, default="check", ) -ARG_SHIP_DAG = Arg( - ("--ship-dag",), help="Pickles (serializes) the DAG and ships it to the worker", action="store_true" -) 
-ARG_PICKLE = Arg(("-p", "--pickle"), help="Serialized pickle object of the entire dag (used internally)") ARG_CFG_PATH = Arg(("--cfg-path",), help="Path to config file to use instead of airflow.cfg") ARG_MAP_INDEX = Arg(("--map-index",), type=int, default=-1, help="Mapped task index") ARG_READ_FROM_DB = Arg(("--read-from-db",), help="Read dag from DB instead of dag file", action="store_true") @@ -795,16 +791,6 @@ def string_lower_type(val): type=int, help="Set the number of runs to execute before exiting", ) -ARG_DO_PICKLE = Arg( - ("-p", "--do-pickle"), - default=False, - help=( - "Attempt to pickle the DAG object to send over " - "to the workers, instead of letting workers run their version " - "of the code" - ), - action="store_true", -) ARG_WITHOUT_MINGLE = Arg( ("--without-mingle",), @@ -1351,8 +1337,6 @@ class GroupCommand(NamedTuple): ARG_IGNORE_ALL_DEPENDENCIES, ARG_IGNORE_DEPENDENCIES, ARG_DEPENDS_ON_PAST, - ARG_SHIP_DAG, - ARG_PICKLE, ARG_INTERACTIVE, ARG_SHUT_DOWN_LOGGING, ARG_MAP_INDEX, @@ -1968,7 +1952,6 @@ class GroupCommand(NamedTuple): args=( ARG_SUBDIR, ARG_NUM_RUNS, - ARG_DO_PICKLE, ARG_PID, ARG_DAEMON, ARG_STDOUT, @@ -2010,7 +1993,6 @@ class GroupCommand(NamedTuple): ARG_DAEMON, ARG_SUBDIR, ARG_NUM_RUNS, - ARG_DO_PICKLE, ARG_STDOUT, ARG_STDERR, ARG_LOG_FILE, diff --git a/airflow/cli/commands/dag_command.py b/airflow/cli/commands/dag_command.py index 92d1825dc627..dfff75ee2d6c 100644 --- a/airflow/cli/commands/dag_command.py +++ b/airflow/cli/commands/dag_command.py @@ -225,7 +225,6 @@ def _get_dagbag_dag_details(dag: DAG) -> dict: "is_paused": dag.get_is_paused(), "is_active": dag.get_is_active(), "last_parsed_time": None, - "last_pickled": None, "last_expired": None, "default_view": dag.default_view, "fileloc": dag.fileloc, diff --git a/airflow/cli/commands/dag_processor_command.py b/airflow/cli/commands/dag_processor_command.py index 8ec173ba5202..eea1c0db20dc 100644 --- a/airflow/cli/commands/dag_processor_command.py +++ b/airflow/cli/commands/dag_processor_command.py @@ -48,7 +48,6 @@ def _create_dag_processor_job_runner(args: Any) -> DagProcessorJobRunner: dag_directory=args.subdir, max_runs=args.num_runs, dag_ids=[], - pickle_dags=args.do_pickle, ), ) diff --git a/airflow/cli/commands/scheduler_command.py b/airflow/cli/commands/scheduler_command.py index 96cfe1e2852f..4c4b751e2c50 100644 --- a/airflow/cli/commands/scheduler_command.py +++ b/airflow/cli/commands/scheduler_command.py @@ -39,9 +39,7 @@ def _run_scheduler_job(args) -> None: - job_runner = SchedulerJobRunner( - job=Job(), subdir=process_subdir(args.subdir), num_runs=args.num_runs, do_pickle=args.do_pickle - ) + job_runner = SchedulerJobRunner(job=Job(), subdir=process_subdir(args.subdir), num_runs=args.num_runs) ExecutorLoader.validate_database_executor_compatibility(job_runner.job.executor.__class__) enable_health_check = conf.getboolean("scheduler", "ENABLE_HEALTH_CHECK") with _serve_logs(args.skip_serve_logs), _serve_health_check(enable_health_check): diff --git a/airflow/cli/commands/task_command.py b/airflow/cli/commands/task_command.py index 03d2737072f3..e14c18399555 100644 --- a/airflow/cli/commands/task_command.py +++ b/airflow/cli/commands/task_command.py @@ -42,7 +42,7 @@ from airflow.jobs.job import Job, run_job from airflow.jobs.local_task_job_runner import LocalTaskJobRunner from airflow.listeners.listener import get_listener_manager -from airflow.models import DagPickle, TaskInstance +from airflow.models import TaskInstance from airflow.models.dag import DAG, _run_inline_trigger from 
airflow.models.dagrun import DagRun from airflow.models.param import ParamsDict @@ -56,7 +56,6 @@ from airflow.utils.cli import ( get_dag, get_dag_by_file_location, - get_dag_by_pickle, get_dags, should_ignore_depends_on_past, suppress_logs_and_warning, @@ -266,20 +265,6 @@ def _run_task_by_executor(args, dag: DAG, ti: TaskInstance) -> None: This can result in the task being started by another host if the executor implementation does. """ - pickle_id = None - if args.ship_dag: - try: - # Running remotely, so pickling the DAG - with create_session() as session: - pickle = DagPickle(dag) - session.add(pickle) - pickle_id = pickle.id - # TODO: This should be written to a log - print(f"Pickled dag {dag} as pickle_id: {pickle_id}") - except Exception as e: - print("Could not pickle the DAG") - print(e) - raise e if ti.executor: executor = ExecutorLoader.load_executor(ti.executor) else: @@ -290,7 +275,6 @@ def _run_task_by_executor(args, dag: DAG, ti: TaskInstance) -> None: executor.queue_task_instance( ti, mark_success=args.mark_success, - pickle_id=pickle_id, ignore_all_deps=args.ignore_all_dependencies, ignore_depends_on_past=should_ignore_depends_on_past(args), wait_for_past_depends_before_skipping=(args.depends_on_past == "wait"), @@ -311,7 +295,6 @@ def _run_task_by_local_task_job(args, ti: TaskInstance | TaskInstancePydantic) - job=Job(dag_id=ti.dag_id), task_instance=ti, mark_success=args.mark_success, - pickle_id=args.pickle, ignore_all_deps=args.ignore_all_dependencies, ignore_depends_on_past=should_ignore_depends_on_past(args), wait_for_past_depends_before_skipping=(args.depends_on_past == "wait"), @@ -435,8 +418,7 @@ def task_run(args, dag: DAG | None = None) -> TaskReturnCode | None: f"You provided the option {unsupported_flags}. " "Delete it to execute the command." ) - if dag and args.pickle: - raise AirflowException("You cannot use the --pickle option when using DAG.cli() method.") + if args.cfg_path: with open(args.cfg_path) as conf_file: conf_dict = json.load(conf_file) @@ -451,10 +433,7 @@ def task_run(args, dag: DAG | None = None) -> TaskReturnCode | None: get_listener_manager().hook.on_starting(component=TaskCommandMarker()) - if args.pickle: - print(f"Loading pickle id: {args.pickle}") - _dag = get_dag_by_pickle(args.pickle) - elif not dag: + if not dag: _dag = get_dag(args.subdir, args.dag_id, args.read_from_db) else: _dag = dag diff --git a/airflow/dag_processing/manager.py b/airflow/dag_processing/manager.py index 05fb72daee60..0f3441a5d4d1 100644 --- a/airflow/dag_processing/manager.py +++ b/airflow/dag_processing/manager.py @@ -117,7 +117,6 @@ class DagFileProcessorAgent(LoggingMixin, MultiprocessingStartMethodMixin): for unlimited. :param processor_timeout: How long to wait before timing out a DAG file processor :param dag_ids: if specified, only schedule tasks with these DAG IDs - :param pickle_dags: whether to pickle DAGs. 
:param async_mode: Whether to start agent in async mode """ @@ -127,7 +126,6 @@ def __init__( max_runs: int, processor_timeout: timedelta, dag_ids: list[str] | None, - pickle_dags: bool, async_mode: bool, ): super().__init__() @@ -135,7 +133,6 @@ def __init__( self._max_runs = max_runs self._processor_timeout = processor_timeout self._dag_ids = dag_ids - self._pickle_dags = pickle_dags self._async_mode = async_mode # Map from file path to the processor self._processors: dict[str, DagFileProcessorProcess] = {} @@ -163,7 +160,6 @@ def start(self) -> None: self._processor_timeout, child_signal_conn, self._dag_ids, - self._pickle_dags, self._async_mode, ), ) @@ -223,7 +219,6 @@ def _run_processor_manager( processor_timeout: timedelta, signal_conn: MultiprocessingConnection, dag_ids: list[str] | None, - pickle_dags: bool, async_mode: bool, ) -> None: # Make this process start as a new process group - that makes it easy @@ -240,7 +235,6 @@ def _run_processor_manager( max_runs=max_runs, processor_timeout=processor_timeout, dag_ids=dag_ids, - pickle_dags=pickle_dags, signal_conn=signal_conn, async_mode=async_mode, ) @@ -353,7 +347,6 @@ class DagFileProcessorManager(LoggingMixin): :param processor_timeout: How long to wait before timing out a DAG file processor :param signal_conn: connection to communicate signal with processor agent. :param dag_ids: if specified, only schedule tasks with these DAG IDs - :param pickle_dags: whether to pickle DAGs. :param async_mode: whether to start the manager in async mode """ @@ -372,7 +365,6 @@ def __init__( max_runs: int, processor_timeout: timedelta, dag_ids: list[str] | None, - pickle_dags: bool, signal_conn: MultiprocessingConnection | None = None, async_mode: bool = True, ): @@ -383,7 +375,6 @@ def __init__( self._max_runs = max_runs # signal_conn is None for dag_processor_standalone mode. self._direct_scheduler_conn = signal_conn - self._pickle_dags = pickle_dags self._dag_ids = dag_ids self._async_mode = async_mode self._parsing_start_time: float | None = None @@ -1191,11 +1182,10 @@ def collect_results(self) -> None: self.log.debug("%s file paths queued for processing", len(self._file_path_queue)) @staticmethod - def _create_process(file_path, pickle_dags, dag_ids, dag_directory, callback_requests): + def _create_process(file_path, dag_ids, dag_directory, callback_requests): """Create DagFileProcessorProcess instance.""" return DagFileProcessorProcess( file_path=file_path, - pickle_dags=pickle_dags, dag_ids=dag_ids, dag_directory=dag_directory, callback_requests=callback_requests, @@ -1217,7 +1207,6 @@ def start_new_processes(self): callback_to_execute_for_file = self._callback_to_execute[file_path] processor = self._create_process( file_path, - self._pickle_dags, self._dag_ids, self.get_dag_directory(), callback_to_execute_for_file, diff --git a/airflow/dag_processing/processor.py b/airflow/dag_processing/processor.py index 8694f5890ccd..394e09245127 100644 --- a/airflow/dag_processing/processor.py +++ b/airflow/dag_processing/processor.py @@ -91,7 +91,6 @@ class DagFileProcessorProcess(LoggingMixin, MultiprocessingStartMethodMixin): Runs DAG processing in a separate process using DagFileProcessor. 
:param file_path: a Python file containing Airflow DAG definitions - :param pickle_dags: whether to serialize the DAG objects to the DB :param dag_ids: If specified, only look at these DAG ID's :param callback_requests: failure callback to execute """ @@ -102,14 +101,12 @@ class DagFileProcessorProcess(LoggingMixin, MultiprocessingStartMethodMixin): def __init__( self, file_path: str, - pickle_dags: bool, dag_ids: list[str] | None, dag_directory: str, callback_requests: list[CallbackRequest], ): super().__init__() self._file_path = file_path - self._pickle_dags = pickle_dags self._dag_ids = dag_ids self._dag_directory = dag_directory self._callback_requests = callback_requests @@ -138,7 +135,6 @@ def _run_file_processor( result_channel: MultiprocessingConnection, parent_channel: MultiprocessingConnection, file_path: str, - pickle_dags: bool, dag_ids: list[str] | None, thread_name: str, dag_directory: str, @@ -150,8 +146,6 @@ def _run_file_processor( :param result_channel: the connection to use for passing back the result :param parent_channel: the parent end of the channel to close in the child :param file_path: the file to process - :param pickle_dags: whether to pickle the DAGs found in the file and - save them to the DB :param dag_ids: if specified, only examine DAG ID's that are in this list :param thread_name: the name to use for the process that is launched @@ -182,7 +176,6 @@ def _handle_dag_file_processing(): dag_file_processor = DagFileProcessor(dag_ids=dag_ids, dag_directory=dag_directory, log=log) result: tuple[int, int, int] = dag_file_processor.process_file( file_path=file_path, - pickle_dags=pickle_dags, callback_requests=callback_requests, ) result_channel.send(result) @@ -245,7 +238,6 @@ def start(self) -> None: _child_channel, _parent_channel, self.file_path, - self._pickle_dags, self._dag_ids, f"DagFileProcessor{self._instance_id}", self._dag_directory, @@ -416,8 +408,7 @@ class DagFileProcessor(LoggingMixin): 1. Execute the file and look for DAG objects in the namespace. 2. Execute any Callbacks if passed to DagFileProcessor.process_file 3. Serialize the DAGs and save it to DB (or update existing record in the DB). - 4. Pickle the DAG and save it to the DB (if necessary). - 5. Record any errors importing the file into ORM + 4. Record any errors importing the file into ORM Returns a tuple of 'number of dags found' and 'the count of import errors' @@ -709,7 +700,6 @@ def process_file( self, file_path: str, callback_requests: list[CallbackRequest], - pickle_dags: bool = False, session: Session = NEW_SESSION, ) -> tuple[int, int, int]: """ @@ -720,14 +710,11 @@ def process_file( 1. Execute the file and look for DAG objects in the namespace. 2. Execute any Callbacks if passed to this method. 3. Serialize the DAGs and save it to DB (or update existing record in the DB). - 4. Pickle the DAG and save it to the DB (if necessary). - 5. Mark any DAGs which are no longer present as inactive - 6. Record any errors importing the file into ORM + 4. Mark any DAGs which are no longer present as inactive + 5. 
Record any errors importing the file into ORM :param file_path: the path to the Python file that should be executed :param callback_requests: failure callback to execute - :param pickle_dags: whether serialize the DAGs found in the file and - save them to the db :return: number of dags found, count of import errors, last number of db queries """ self.log.info("Processing file %s for tasks to queue", file_path) @@ -761,7 +748,6 @@ def process_file( serialize_errors = DagFileProcessor.save_dag_to_db( dags=dagbag.dags, dag_directory=self._dag_directory, - pickle_dags=pickle_dags, ) dagbag.import_errors.update(dict(serialize_errors)) @@ -795,20 +781,8 @@ def _cache_last_num_of_db_queries(self, query_counter: _QueryCounter | None = No def save_dag_to_db( dags: dict[str, DAG], dag_directory: str, - pickle_dags: bool = False, session=NEW_SESSION, ): import_errors = DagBag._sync_to_db(dags=dags, processor_subdir=dag_directory, session=session) session.commit() - - dag_ids = list(dags) - - if pickle_dags: - paused_dag_ids = DagModel.get_paused_dag_ids(dag_ids=dag_ids) - - unpaused_dags: list[DAG] = [dag for dag_id, dag in dags.items() if dag_id not in paused_dag_ids] - - for dag in unpaused_dags: - dag.pickle(session) - return import_errors diff --git a/airflow/executors/base_executor.py b/airflow/executors/base_executor.py index 87f496fb0540..fba6d96969a1 100644 --- a/airflow/executors/base_executor.py +++ b/airflow/executors/base_executor.py @@ -112,7 +112,6 @@ class BaseExecutor(LoggingMixin): """ supports_ad_hoc_ti_run: bool = False - supports_pickling: bool = True supports_sentry: bool = False is_local: bool = False @@ -172,7 +171,6 @@ def queue_task_instance( self, task_instance: TaskInstance, mark_success: bool = False, - pickle_id: int | None = None, ignore_all_deps: bool = False, ignore_depends_on_past: bool = False, wait_for_past_depends_before_skipping: bool = False, @@ -196,7 +194,6 @@ def queue_task_instance( ignore_task_deps=ignore_task_deps, ignore_ti_state=ignore_ti_state, pool=pool, - pickle_id=pickle_id, # cfg_path is needed to propagate the config values if using impersonation # (run_as_user), given that there are different code paths running tasks. # https://github.com/apache/airflow/pull/2991 diff --git a/airflow/executors/debug_executor.py b/airflow/executors/debug_executor.py index aead7e2b2c11..525c80791e37 100644 --- a/airflow/executors/debug_executor.py +++ b/airflow/executors/debug_executor.py @@ -97,7 +97,6 @@ def queue_task_instance( self, task_instance: TaskInstance, mark_success: bool = False, - pickle_id: int | None = None, ignore_all_deps: bool = False, ignore_depends_on_past: bool = False, wait_for_past_depends_before_skipping: bool = False, diff --git a/airflow/executors/local_executor.py b/airflow/executors/local_executor.py index f28e525ec3ac..a39a206af507 100644 --- a/airflow/executors/local_executor.py +++ b/airflow/executors/local_executor.py @@ -223,7 +223,6 @@ class LocalExecutor(BaseExecutor): """ is_local: bool = True - supports_pickling: bool = False serve_logs: bool = True diff --git a/airflow/executors/sequential_executor.py b/airflow/executors/sequential_executor.py index 0b4cbdea9dd4..1fca95acd3b0 100644 --- a/airflow/executors/sequential_executor.py +++ b/airflow/executors/sequential_executor.py @@ -48,8 +48,6 @@ class SequentialExecutor(BaseExecutor): SequentialExecutor alongside sqlite as you first install it. 
""" - supports_pickling: bool = False - is_local: bool = True is_single_threaded: bool = True is_production: bool = False diff --git a/airflow/jobs/local_task_job_runner.py b/airflow/jobs/local_task_job_runner.py index c900c88674e7..599493ea58c4 100644 --- a/airflow/jobs/local_task_job_runner.py +++ b/airflow/jobs/local_task_job_runner.py @@ -90,7 +90,6 @@ def __init__( ignore_task_deps: bool = False, ignore_ti_state: bool = False, mark_success: bool = False, - pickle_id: int | None = None, pool: str | None = None, external_executor_id: str | None = None, ): @@ -103,7 +102,6 @@ def __init__( self.ignore_task_deps = ignore_task_deps self.ignore_ti_state = ignore_ti_state self.pool = pool - self.pickle_id = pickle_id self.mark_success = mark_success self.external_executor_id = external_executor_id # terminating state is used so that a job don't try to diff --git a/airflow/jobs/scheduler_job_runner.py b/airflow/jobs/scheduler_job_runner.py index 39e4e35087bc..fb85a4a73cc3 100644 --- a/airflow/jobs/scheduler_job_runner.py +++ b/airflow/jobs/scheduler_job_runner.py @@ -156,8 +156,6 @@ class SchedulerJobRunner(BaseJobRunner, LoggingMixin): -1 for unlimited times. :param scheduler_idle_sleep_time: The number of seconds to wait between polls of running processors - :param do_pickle: once a DAG object is obtained by executing the Python - file, whether to serialize the DAG object to the DB :param log: override the default Logger """ @@ -170,7 +168,6 @@ def __init__( num_runs: int = conf.getint("scheduler", "num_runs"), num_times_parse_dags: int = -1, scheduler_idle_sleep_time: float = conf.getfloat("scheduler", "scheduler_idle_sleep_time"), - do_pickle: bool = False, log: logging.Logger | None = None, ): super().__init__(job) @@ -187,8 +184,6 @@ def __init__( self._dag_stale_not_seen_duration = conf.getint("scheduler", "dag_stale_not_seen_duration") self._task_queued_timeout = conf.getfloat("scheduler", "task_queued_timeout") - self.do_pickle = do_pickle - self._enable_tracemalloc = conf.getboolean("scheduler", "enable_tracemalloc") if self._enable_tracemalloc: import tracemalloc @@ -639,7 +634,6 @@ def _enqueue_task_instances_with_queued_state( continue command = ti.command_as_list( local=True, - pickle_id=ti.dag_model.pickle_id, ) priority = ti.priority_weight @@ -923,9 +917,6 @@ def _execute(self) -> int | None: executor_class, _ = ExecutorLoader.import_default_executor_cls() - # DAGs can be pickled for easier remote execution by some executors - pickle_dags = self.do_pickle and executor_class.supports_pickling - self.log.info("Processing each file at most %s times", self.num_times_parse_dags) # When using sqlite, we do not use async_mode @@ -940,7 +931,6 @@ def _execute(self) -> int | None: max_runs=self.num_times_parse_dags, processor_timeout=processor_timeout, dag_ids=[], - pickle_dags=pickle_dags, async_mode=async_mode, ) diff --git a/airflow/migrations/versions/0046_3_0_0_drop_dag_pickling.py b/airflow/migrations/versions/0046_3_0_0_drop_dag_pickling.py new file mode 100644 index 000000000000..599759fa9f86 --- /dev/null +++ b/airflow/migrations/versions/0046_3_0_0_drop_dag_pickling.py @@ -0,0 +1,66 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Drop DAG pickling. + +Revision ID: d03e4a635aa3 +Revises: d8cd3297971e +Create Date: 2024-11-04 22:07:51.329843 + +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op + +from airflow.migrations.db_types import TIMESTAMP + +# revision identifiers, used by Alembic. +revision = "d03e4a635aa3" +down_revision = "d8cd3297971e" +branch_labels = None +depends_on = None +airflow_version = "3.0.0" + + +def upgrade(): + """Drop DAG pickling.""" + with op.batch_alter_table("dag", schema=None) as batch_op: + batch_op.drop_column("pickle_id") + batch_op.drop_column("last_pickled") + + op.drop_table("dag_pickle") + + +def downgrade(): + """Re-Add DAG pickling.""" + import dill + + with op.batch_alter_table("dag", schema=None) as batch_op: + batch_op.add_column(sa.Column("last_pickled", sa.TIMESTAMP(), nullable=True)) + batch_op.add_column(sa.Column("pickle_id", sa.INTEGER(), nullable=True)) + + op.create_table( + "dag_pickle", + sa.Column("id", sa.Integer(), primary_key=True, nullable=False), + sa.Column("pickle", sa.PickleType(pickler=dill), nullable=True), + sa.Column("created_dttm", TIMESTAMP(timezone=True), nullable=True), + sa.Column("pickle_hash", sa.BigInteger, nullable=True), + ) diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index 7e71dddc65df..1ab4e5584c97 100644 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -30,7 +30,6 @@ "DagBag", "DagWarning", "DagModel", - "DagPickle", "DagRun", "DagTag", "DbCallbackRequest", @@ -90,7 +89,6 @@ def __getattr__(name): "Connection": "airflow.models.connection", "DagBag": "airflow.models.dagbag", "DagModel": "airflow.models.dag", - "DagPickle": "airflow.models.dagpickle", "DagRun": "airflow.models.dagrun", "DagTag": "airflow.models.dag", "DagWarning": "airflow.models.dagwarning", @@ -119,7 +117,6 @@ def __getattr__(name): from airflow.models.connection import Connection from airflow.models.dag import DAG, DagModel, DagTag from airflow.models.dagbag import DagBag - from airflow.models.dagpickle import DagPickle from airflow.models.dagrun import DagRun from airflow.models.dagwarning import DagWarning from airflow.models.db_callback_request import DbCallbackRequest diff --git a/airflow/models/dag.py b/airflow/models/dag.py index 851d2a512934..337fc5c8163e 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -22,10 +22,8 @@ import functools import logging import pathlib -import pickle import sys import time -import traceback from collections import defaultdict from contextlib import ExitStack from datetime import datetime, timedelta @@ -88,7 +86,6 @@ from airflow.models.base import Base, StringID from airflow.models.baseoperator import BaseOperator from airflow.models.dagcode import DagCode -from airflow.models.dagpickle import DagPickle from airflow.models.dagrun import RUN_ID_REGEX, DagRun from airflow.models.taskinstance import ( Context, @@ -739,14 +736,6 @@ def dag_id(self, value: str) -> None: def timetable_summary(self) -> str: return self.timetable.summary - @property - def pickle_id(self) -> int | None: - return self._pickle_id - - 
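As an aside (not part of the patch itself): a quick way to confirm the migration above took effect is SQLAlchemy's runtime inspector. The sketch below assumes an initialized Airflow metadata database on which `airflow db migrate` has already been run.

from sqlalchemy import inspect

from airflow.settings import engine  # assumes Airflow has already configured its ORM engine

inspector = inspect(engine)
# upgrade() drops the whole dag_pickle table ...
assert "dag_pickle" not in inspector.get_table_names()
# ... and removes the pickling bookkeeping columns from the dag table.
dag_columns = {column["name"] for column in inspector.get_columns("dag")}
assert {"pickle_id", "last_pickled"}.isdisjoint(dag_columns)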
@pickle_id.setter - def pickle_id(self, value: int) -> None: - self._pickle_id = value - @property def relative_fileloc(self) -> pathlib.Path: """File location of the importable dag 'file' relative to the configured DAGs folder.""" @@ -1549,35 +1538,6 @@ def clear_dags( print("Cancelled, nothing was cleared.") return count - def pickle_info(self): - d = {} - d["is_picklable"] = True - try: - dttm = timezone.utcnow() - pickled = pickle.dumps(self) - d["pickle_len"] = len(pickled) - d["pickling_duration"] = str(timezone.utcnow() - dttm) - except Exception as e: - self.log.debug(e) - d["is_picklable"] = False - d["stacktrace"] = traceback.format_exc() - return d - - @provide_session - def pickle(self, session=NEW_SESSION) -> DagPickle: - dag = session.scalar(select(DagModel).where(DagModel.dag_id == self.dag_id).limit(1)) - dp = None - if dag and dag.pickle_id: - dp = session.scalar(select(DagPickle).where(DagPickle.id == dag.pickle_id).limit(1)) - if not dp or dp.pickle != self: - dp = DagPickle(dag=self) - session.add(dp) - self.last_pickled = timezone.utcnow() - session.commit() - self.pickle_id = dp.id - - return dp - def cli(self): """Exposes a CLI specific to this DAG.""" check_cycle(self) @@ -2041,13 +2001,9 @@ class DagModel(Base): is_active = Column(Boolean, default=False) # Last time the scheduler started last_parsed_time = Column(UtcDateTime) - # Last time this DAG was pickled - last_pickled = Column(UtcDateTime) # Time when the DAG last received a refresh signal # (e.g. the DAG's "refresh" button was clicked in the web UI) last_expired = Column(UtcDateTime) - # Foreign key to the latest pickle_id - pickle_id = Column(Integer) # The location of the file containing the DAG object # Note: Do not depend on fileloc pointing to a file; in the case of a # packaged DAG, it will point to the subpath of the DAG within the diff --git a/airflow/models/dagpickle.py b/airflow/models/dagpickle.py deleted file mode 100644 index c06ef09709f1..000000000000 --- a/airflow/models/dagpickle.py +++ /dev/null @@ -1,56 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -from typing import TYPE_CHECKING - -import dill -from sqlalchemy import BigInteger, Column, Integer, PickleType - -from airflow.models.base import Base -from airflow.utils import timezone -from airflow.utils.sqlalchemy import UtcDateTime - -if TYPE_CHECKING: - from airflow.models.dag import DAG - - -class DagPickle(Base): - """ - Represents a version of a DAG and becomes a source of truth for an execution. - - Dags can originate from different places (user repos, main repo, ...) and also get executed - in different places (different executors). 
A pickle is a native python serialized object, - and in this case gets stored in the database for the duration of the job. - - The executors pick up the DagPickle id and read the dag definition from the database. - """ - - id = Column(Integer, primary_key=True) - pickle = Column(PickleType(pickler=dill)) - created_dttm = Column(UtcDateTime, default=timezone.utcnow) - pickle_hash = Column(BigInteger) - - __tablename__ = "dag_pickle" - - def __init__(self, dag: DAG) -> None: - self.dag_id = dag.dag_id - if hasattr(dag, "template_env"): - dag.template_env = None # type: ignore[attr-defined] - self.pickle_hash = hash(dag) - self.pickle = dag diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index e86c47778246..dfd776e685a0 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -2030,7 +2030,6 @@ def _command_as_list( wait_for_past_depends_before_skipping: bool = False, ignore_ti_state: bool = False, local: bool = False, - pickle_id: int | None = None, raw: bool = False, pool: str | None = None, cfg_path: str | None = None, @@ -2047,14 +2046,11 @@ def _command_as_list( if dag is None: raise ValueError("DagModel is empty") - should_pass_filepath = not pickle_id and dag - path: PurePath | None = None - if should_pass_filepath: - path = dag.relative_fileloc + path = dag.relative_fileloc - if path: - if not path.is_absolute(): - path = "DAGS_FOLDER" / path + if path: + if not path.is_absolute(): + path = "DAGS_FOLDER" / path return TaskInstance.generate_command( ti.dag_id, @@ -2067,7 +2063,6 @@ def _command_as_list( wait_for_past_depends_before_skipping=wait_for_past_depends_before_skipping, ignore_ti_state=ignore_ti_state, local=local, - pickle_id=pickle_id, file_path=path, raw=raw, pool=pool, @@ -2084,7 +2079,6 @@ def command_as_list( wait_for_past_depends_before_skipping: bool = False, ignore_ti_state: bool = False, local: bool = False, - pickle_id: int | None = None, raw: bool = False, pool: str | None = None, cfg_path: str | None = None, @@ -2103,7 +2097,6 @@ def command_as_list( wait_for_past_depends_before_skipping=wait_for_past_depends_before_skipping, ignore_ti_state=ignore_ti_state, local=local, - pickle_id=pickle_id, raw=raw, pool=pool, cfg_path=cfg_path, @@ -2121,7 +2114,6 @@ def generate_command( ignore_task_deps: bool = False, ignore_ti_state: bool = False, local: bool = False, - pickle_id: int | None = None, file_path: PurePath | str | None = None, raw: bool = False, pool: str | None = None, @@ -2144,8 +2136,6 @@ def generate_command( and trigger rule :param ignore_ti_state: Ignore the task instance's previous failure/success :param local: Whether to run the task locally - :param pickle_id: If the DAG was serialized to the DB, the ID - associated with the pickled DAG :param file_path: path to the file containing the DAG definition :param raw: raw mode (needs more details) :param pool: the Airflow pool that the task should run in @@ -2155,8 +2145,6 @@ def generate_command( cmd = ["airflow", "tasks", "run", dag_id, task_id, run_id] if mark_success: cmd.extend(["--mark-success"]) - if pickle_id: - cmd.extend(["--pickle", str(pickle_id)]) if ignore_all_deps: cmd.extend(["--ignore-all-dependencies"]) if ignore_task_deps: diff --git a/airflow/serialization/pydantic/dag.py b/airflow/serialization/pydantic/dag.py index 4e37a633da05..83bbea760054 100644 --- a/airflow/serialization/pydantic/dag.py +++ b/airflow/serialization/pydantic/dag.py @@ -80,9 +80,7 @@ class DagModelPydantic(BaseModelPydantic): is_paused: bool = 
is_paused_at_creation is_active: Optional[bool] = False last_parsed_time: Optional[datetime] - last_pickled: Optional[datetime] last_expired: Optional[datetime] - pickle_id: Optional[int] fileloc: str processor_subdir: Optional[str] owners: Optional[str] diff --git a/airflow/serialization/pydantic/taskinstance.py b/airflow/serialization/pydantic/taskinstance.py index bf121353ca80..d5573922b839 100644 --- a/airflow/serialization/pydantic/taskinstance.py +++ b/airflow/serialization/pydantic/taskinstance.py @@ -486,7 +486,6 @@ def command_as_list( wait_for_past_depends_before_skipping: bool = False, ignore_ti_state: bool = False, local: bool = False, - pickle_id: int | None = None, raw: bool = False, pool: str | None = None, cfg_path: str | None = None, @@ -505,7 +504,6 @@ def command_as_list( wait_for_past_depends_before_skipping=wait_for_past_depends_before_skipping, ignore_ti_state=ignore_ti_state, local=local, - pickle_id=pickle_id, raw=raw, pool=pool, cfg_path=cfg_path, diff --git a/airflow/serialization/serialized_objects.py b/airflow/serialization/serialized_objects.py index 79403860f5fa..52b0bcb1530a 100644 --- a/airflow/serialization/serialized_objects.py +++ b/airflow/serialization/serialized_objects.py @@ -1577,10 +1577,6 @@ class SerializedDAG(DAG, BaseSerialization): A stringified DAG can only be used in the scope of scheduler and webserver, because fields that are not serializable, such as functions and customer defined classes, are casted to strings. - - Compared with SimpleDAG: SerializedDAG contains all information for webserver. - Compared with DagPickle: DagPickle contains all information for worker, but some DAGs are - not pickle-able. SerializedDAG works for all DAGs. """ _decorated_fields = {"default_args", "access_control"} diff --git a/airflow/task/standard_task_runner.py b/airflow/task/standard_task_runner.py index a5641002c961..bc846574f024 100644 --- a/airflow/task/standard_task_runner.py +++ b/airflow/task/standard_task_runner.py @@ -99,7 +99,6 @@ def __init__(self, job_runner: LocalTaskJobRunner): self._cfg_path = cfg_path self._command = popen_prepend + self._task_instance.command_as_list( raw=True, - pickle_id=self.job_runner.pickle_id, mark_success=self.job_runner.mark_success, pool=self.job_runner.pool, cfg_path=cfg_path, diff --git a/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow/ui/openapi-gen/requests/schemas.gen.ts index cf51451c98bd..906bb43df988 100644 --- a/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -313,18 +313,6 @@ export const $DAGDetailsResponse = { ], title: "Last Parsed Time", }, - last_pickled: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Last Pickled", - }, last_expired: { anyOf: [ { @@ -337,18 +325,6 @@ export const $DAGDetailsResponse = { ], title: "Last Expired", }, - pickle_id: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Pickle Id", - }, default_view: { anyOf: [ { @@ -631,9 +607,7 @@ export const $DAGDetailsResponse = { "is_paused", "is_active", "last_parsed_time", - "last_pickled", "last_expired", - "pickle_id", "default_view", "fileloc", "description", @@ -712,18 +686,6 @@ export const $DAGResponse = { ], title: "Last Parsed Time", }, - last_pickled: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Last Pickled", - }, last_expired: { anyOf: [ { @@ -736,18 +698,6 @@ export const $DAGResponse = { ], 
title: "Last Expired", }, - pickle_id: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Pickle Id", - }, default_view: { anyOf: [ { @@ -899,9 +849,7 @@ export const $DAGResponse = { "is_paused", "is_active", "last_parsed_time", - "last_pickled", "last_expired", - "pickle_id", "default_view", "fileloc", "description", @@ -1267,18 +1215,6 @@ export const $DAGWithLatestDagRunsResponse = { ], title: "Last Parsed Time", }, - last_pickled: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Last Pickled", - }, last_expired: { anyOf: [ { @@ -1291,18 +1227,6 @@ export const $DAGWithLatestDagRunsResponse = { ], title: "Last Expired", }, - pickle_id: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Pickle Id", - }, default_view: { anyOf: [ { @@ -1461,9 +1385,7 @@ export const $DAGWithLatestDagRunsResponse = { "is_paused", "is_active", "last_parsed_time", - "last_pickled", "last_expired", - "pickle_id", "default_view", "fileloc", "description", diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index 18d5bc296eb2..afe771e5a1f6 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -82,9 +82,7 @@ export type DAGDetailsResponse = { is_paused: boolean; is_active: boolean; last_parsed_time: string | null; - last_pickled: string | null; last_expired: string | null; - pickle_id: string | null; default_view: string | null; fileloc: string; description: string | null; @@ -143,9 +141,7 @@ export type DAGResponse = { is_paused: boolean; is_active: boolean; last_parsed_time: string | null; - last_pickled: string | null; last_expired: string | null; - pickle_id: string | null; default_view: string | null; fileloc: string; description: string | null; @@ -272,9 +268,7 @@ export type DAGWithLatestDagRunsResponse = { is_paused: boolean; is_active: boolean; last_parsed_time: string | null; - last_pickled: string | null; last_expired: string | null; - pickle_id: string | null; default_view: string | null; fileloc: string; description: string | null; diff --git a/airflow/ui/src/pages/DagsList/DagCard.test.tsx b/airflow/ui/src/pages/DagsList/DagCard.test.tsx index 3e60146baa05..447058633434 100644 --- a/airflow/ui/src/pages/DagsList/DagCard.test.tsx +++ b/airflow/ui/src/pages/DagsList/DagCard.test.tsx @@ -43,7 +43,6 @@ const mockDag = { is_paused: false, last_expired: null, last_parsed_time: "2024-08-22T13:50:10.372238+00:00", - last_pickled: null, latest_dag_runs: [], max_active_runs: 16, max_active_tasks: 16, @@ -53,7 +52,6 @@ const mockDag = { next_dagrun_data_interval_end: "2024-08-23T00:00:00+00:00", next_dagrun_data_interval_start: "2024-08-22T00:00:00+00:00", owners: ["airflow"], - pickle_id: null, tags: [], timetable_description: "", timetable_summary: "", diff --git a/airflow/utils/cli.py b/airflow/utils/cli.py index 1142c5ba0b62..81b09f9d1104 100644 --- a/airflow/utils/cli.py +++ b/airflow/utils/cli.py @@ -32,7 +32,6 @@ from typing import TYPE_CHECKING, Callable, TypeVar, cast import re2 -from sqlalchemy import select from airflow import settings from airflow.api_internal.internal_api_call import InternalApiConfig @@ -41,13 +40,10 @@ from airflow.utils.log.non_caching_file_handler import NonCachingFileHandler from airflow.utils.log.secrets_masker import should_hide_value_for_key from airflow.utils.platform import getuser, 
is_terminal_support_colors -from airflow.utils.session import NEW_SESSION, provide_session T = TypeVar("T", bound=Callable) if TYPE_CHECKING: - from sqlalchemy.orm import Session - from airflow.models.dag import DAG logger = logging.getLogger(__name__) @@ -274,18 +270,6 @@ def get_dags(subdir: str | None, dag_id: str, use_regex: bool = False): return matched_dags -@provide_session -def get_dag_by_pickle(pickle_id: int, session: Session = NEW_SESSION) -> DAG: - """Fetch DAG from the database using pickling.""" - from airflow.models import DagPickle - - dag_pickle = session.scalar(select(DagPickle).where(DagPickle.id == pickle_id).limit(1)) - if not dag_pickle: - raise AirflowException(f"pickle_id could not be found in DagPickle.id list: {pickle_id}") - pickle_dag = dag_pickle.pickle - return pickle_dag - - def setup_locations(process, pid=None, stdout=None, stderr=None, log=None): """Create logging paths.""" if not stderr: diff --git a/airflow/utils/db.py b/airflow/utils/db.py index dd3e8c5d2002..d23f54068b59 100644 --- a/airflow/utils/db.py +++ b/airflow/utils/db.py @@ -97,7 +97,7 @@ class MappedClassProtocol(Protocol): "2.9.2": "686269002441", "2.10.0": "22ed7efa9da2", "2.10.3": "5f2621c13b39", - "3.0.0": "d8cd3297971e", + "3.0.0": "d03e4a635aa3", } diff --git a/airflow/www/static/js/types/api-generated.ts b/airflow/www/static/js/types/api-generated.ts index 7526c340b29f..cd602384b846 100644 --- a/airflow/www/static/js/types/api-generated.ts +++ b/airflow/www/static/js/types/api-generated.ts @@ -1007,13 +1007,6 @@ export interface components { * *New in version 2.3.0* */ last_parsed_time?: string | null; - /** - * Format: date-time - * @description The last time the DAG was pickled. - * - * *New in version 2.3.0* - */ - last_pickled?: string | null; /** * Format: date-time * @description Time when the DAG last received a refresh signal diff --git a/dev/perf/scheduler_dag_execution_timing.py b/dev/perf/scheduler_dag_execution_timing.py index d150eed41df0..cbc4ca6e8fc6 100755 --- a/dev/perf/scheduler_dag_execution_timing.py +++ b/dev/perf/scheduler_dag_execution_timing.py @@ -278,7 +278,7 @@ def main(num_runs, repeat, pre_create_dag_runs, executor_class, dag_ids): executor = ShortCircuitExecutor(dag_ids_to_watch=dag_ids, num_runs=num_runs) scheduler_job = Job(executor=executor) - job_runner = SchedulerJobRunner(job=scheduler_job, dag_ids=dag_ids, do_pickle=False) + job_runner = SchedulerJobRunner(job=scheduler_job, dag_ids=dag_ids) executor.job_runner = job_runner total_tasks = sum(len(dag.tasks) for dag in dags) @@ -301,7 +301,7 @@ def main(num_runs, repeat, pre_create_dag_runs, executor_class, dag_ids): reset_dag(dag, session) executor.reset(dag_ids) scheduler_job = Job(executor=executor) - job_runner = SchedulerJobRunner(job=scheduler_job, dag_ids=dag_ids, do_pickle=False) + job_runner = SchedulerJobRunner(job=scheduler_job, dag_ids=dag_ids) executor.scheduler_job = scheduler_job gc.disable() diff --git a/dev/perf/sql_queries.py b/dev/perf/sql_queries.py index 6303d5b6fcd3..60ca8f33f710 100644 --- a/dev/perf/sql_queries.py +++ b/dev/perf/sql_queries.py @@ -123,7 +123,7 @@ def run_scheduler_job(with_db_reset=False) -> None: if with_db_reset: reset_db() - job_runner = SchedulerJobRunner(job=Job(), subdir=DAG_FOLDER, do_pickle=False, num_runs=3) + job_runner = SchedulerJobRunner(job=Job(), subdir=DAG_FOLDER, num_runs=3) run_job(job=job_runner.job, execute_callable=job_runner._execute) diff --git a/docs/apache-airflow/img/airflow_erd.sha256 b/docs/apache-airflow/img/airflow_erd.sha256 index 
8adffd106eae..572ce439c231 100644 --- a/docs/apache-airflow/img/airflow_erd.sha256 +++ b/docs/apache-airflow/img/airflow_erd.sha256 @@ -1 +1 @@ -1d781ee92cc59e7647d7f72ddc542b7f17e03fc8b822950db74415c38279d40f \ No newline at end of file +5ec1019b1b0f43b29fc83638c2a13c0bda90b7e4f0ff542aeab401bbfa9a83e4 \ No newline at end of file diff --git a/docs/apache-airflow/img/airflow_erd.svg b/docs/apache-airflow/img/airflow_erd.svg index 1b0d5b346c95..ba935dd6c4be 100644 --- a/docs/apache-airflow/img/airflow_erd.svg +++ b/docs/apache-airflow/img/airflow_erd.svg @@ -4,11 +4,11 @@ - - + + %3 - + log @@ -169,2136 +169,2106 @@ [TEXT] NOT NULL - - -dag_pickle - -dag_pickle - -id - - [INTEGER] - NOT NULL - -created_dttm - - [TIMESTAMP] - -pickle - - [BYTEA] - -pickle_hash - - [BIGINT] - - + connection - -connection - -id - - [INTEGER] - NOT NULL - -conn_id - - [VARCHAR(250)] - NOT NULL - -conn_type - - [VARCHAR(500)] - NOT NULL - -description - - [TEXT] - -extra - - [TEXT] - -host - - [VARCHAR(500)] - -is_encrypted - - [BOOLEAN] - -is_extra_encrypted - - [BOOLEAN] - -login - - [TEXT] - -password - - [TEXT] - -port - - [INTEGER] - -schema - - [VARCHAR(500)] + +connection + +id + + [INTEGER] + NOT NULL + +conn_id + + [VARCHAR(250)] + NOT NULL + +conn_type + + [VARCHAR(500)] + NOT NULL + +description + + [TEXT] + +extra + + [TEXT] + +host + + [VARCHAR(500)] + +is_encrypted + + [BOOLEAN] + +is_extra_encrypted + + [BOOLEAN] + +login + + [TEXT] + +password + + [TEXT] + +port + + [INTEGER] + +schema + + [VARCHAR(500)] - + variable - -variable - -id - - [INTEGER] - NOT NULL - -description - - [TEXT] - -is_encrypted - - [BOOLEAN] - -key - - [VARCHAR(250)] - -val - - [TEXT] + +variable + +id + + [INTEGER] + NOT NULL + +description + + [TEXT] + +is_encrypted + + [BOOLEAN] + +key + + [VARCHAR(250)] + +val + + [TEXT] - + import_error - -import_error - -id - - [INTEGER] - NOT NULL - -filename - - [VARCHAR(1024)] - -processor_subdir - - [VARCHAR(2000)] - -stacktrace - - [TEXT] - -timestamp - - [TIMESTAMP] + +import_error + +id + + [INTEGER] + NOT NULL + +filename + + [VARCHAR(1024)] + +processor_subdir + + [VARCHAR(2000)] + +stacktrace + + [TEXT] + +timestamp + + [TIMESTAMP] - + job - -job - -id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - -end_date - - [TIMESTAMP] - -executor_class - - [VARCHAR(500)] - -hostname - - [VARCHAR(500)] - -job_type - - [VARCHAR(30)] - -latest_heartbeat - - [TIMESTAMP] - -start_date - - [TIMESTAMP] - -state - - [VARCHAR(20)] - -unixname - - [VARCHAR(1000)] + +job + +id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + +end_date + + [TIMESTAMP] + +executor_class + + [VARCHAR(500)] + +hostname + + [VARCHAR(500)] + +job_type + + [VARCHAR(30)] + +latest_heartbeat + + [TIMESTAMP] + +start_date + + [TIMESTAMP] + +state + + [VARCHAR(20)] + +unixname + + [VARCHAR(1000)] - + serialized_dag - -serialized_dag - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_hash - - [VARCHAR(32)] - NOT NULL - -data - - [JSON] - -data_compressed - - [BYTEA] - -fileloc - - [VARCHAR(2000)] - NOT NULL - -fileloc_hash - - [BIGINT] - NOT NULL - -last_updated - - [TIMESTAMP] - NOT NULL - -processor_subdir - - [VARCHAR(2000)] + +serialized_dag + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_hash + + [VARCHAR(32)] + NOT NULL + +data + + [JSON] + +data_compressed + + [BYTEA] + +fileloc + + [VARCHAR(2000)] + NOT NULL + +fileloc_hash + + [BIGINT] + NOT NULL + +last_updated + + [TIMESTAMP] + NOT NULL + +processor_subdir + + [VARCHAR(2000)] - + asset_alias - -asset_alias - -id - - [INTEGER] - NOT NULL - -group - - 
[VARCHAR(1500)] - NOT NULL - -name - - [VARCHAR(1500)] - NOT NULL + +asset_alias + +id + + [INTEGER] + NOT NULL + +group + + [VARCHAR(1500)] + NOT NULL + +name + + [VARCHAR(1500)] + NOT NULL - + asset_alias_asset - -asset_alias_asset - -alias_id - - [INTEGER] - NOT NULL - -asset_id - - [INTEGER] - NOT NULL + +asset_alias_asset + +alias_id + + [INTEGER] + NOT NULL + +asset_id + + [INTEGER] + NOT NULL asset_alias--asset_alias_asset - -0..N -1 + +0..N +1 - + asset_alias_asset_event - -asset_alias_asset_event - -alias_id - - [INTEGER] - NOT NULL - -event_id - - [INTEGER] - NOT NULL + +asset_alias_asset_event + +alias_id + + [INTEGER] + NOT NULL + +event_id + + [INTEGER] + NOT NULL asset_alias--asset_alias_asset_event - -0..N -1 + +0..N +1 - + dag_schedule_asset_alias_reference - -dag_schedule_asset_alias_reference - -alias_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_alias_reference + +alias_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset_alias--dag_schedule_asset_alias_reference - -0..N -1 + +0..N +1 - + asset - -asset - -id - - [INTEGER] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -extra - - [JSON] - NOT NULL - -group - - [VARCHAR(1500)] - NOT NULL - -name - - [VARCHAR(1500)] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL - -uri - - [VARCHAR(1500)] - NOT NULL + +asset + +id + + [INTEGER] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +extra + + [JSON] + NOT NULL + +group + + [VARCHAR(1500)] + NOT NULL + +name + + [VARCHAR(1500)] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + +uri + + [VARCHAR(1500)] + NOT NULL asset--asset_alias_asset - -0..N -1 + +0..N +1 - + asset_active - -asset_active - -name - - [VARCHAR(1500)] - NOT NULL - -uri - - [VARCHAR(1500)] - NOT NULL + +asset_active + +name + + [VARCHAR(1500)] + NOT NULL + +uri + + [VARCHAR(1500)] + NOT NULL asset--asset_active - -1 -1 + +1 +1 asset--asset_active - -1 -1 + +1 +1 - + dag_schedule_asset_reference - -dag_schedule_asset_reference - -asset_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_reference + +asset_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset--dag_schedule_asset_reference - -0..N -1 + +0..N +1 - + task_outlet_asset_reference - -task_outlet_asset_reference - -asset_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +task_outlet_asset_reference + +asset_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset--task_outlet_asset_reference - -0..N -1 + +0..N +1 - + asset_dag_run_queue - -asset_dag_run_queue - -asset_id - - [INTEGER] - NOT NULL - -target_dag_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL + +asset_dag_run_queue + +asset_id + + [INTEGER] + NOT NULL + +target_dag_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL asset--asset_dag_run_queue - -0..N -1 + +0..N +1 - + asset_event - -asset_event - -id - - 
[INTEGER] - NOT NULL - -asset_id - - [INTEGER] - NOT NULL - -extra - - [JSON] - NOT NULL - -source_dag_id - - [VARCHAR(250)] - -source_map_index - - [INTEGER] - -source_run_id - - [VARCHAR(250)] - -source_task_id - - [VARCHAR(250)] - -timestamp - - [TIMESTAMP] - NOT NULL + +asset_event + +id + + [INTEGER] + NOT NULL + +asset_id + + [INTEGER] + NOT NULL + +extra + + [JSON] + NOT NULL + +source_dag_id + + [VARCHAR(250)] + +source_map_index + + [INTEGER] + +source_run_id + + [VARCHAR(250)] + +source_task_id + + [VARCHAR(250)] + +timestamp + + [TIMESTAMP] + NOT NULL asset_event--asset_alias_asset_event - -0..N -1 + +0..N +1 - + dagrun_asset_event - -dagrun_asset_event - -dag_run_id - - [INTEGER] - NOT NULL - -event_id - - [INTEGER] - NOT NULL + +dagrun_asset_event + +dag_run_id + + [INTEGER] + NOT NULL + +event_id + + [INTEGER] + NOT NULL asset_event--dagrun_asset_event - -0..N -1 + +0..N +1 - + dag - -dag - -dag_id - - [VARCHAR(250)] - NOT NULL - -asset_expression - - [JSON] - -dag_display_name - - [VARCHAR(2000)] - -default_view - - [VARCHAR(25)] - -description - - [TEXT] - -fileloc - - [VARCHAR(2000)] - -has_import_errors - - [BOOLEAN] - -has_task_concurrency_limits - - [BOOLEAN] - NOT NULL - -is_active - - [BOOLEAN] - -is_paused - - [BOOLEAN] - -last_expired - - [TIMESTAMP] - -last_parsed_time - - [TIMESTAMP] - -last_pickled - - [TIMESTAMP] - -max_active_runs - - [INTEGER] - -max_active_tasks - - [INTEGER] - NOT NULL - -max_consecutive_failed_dag_runs - - [INTEGER] - NOT NULL - -next_dagrun - - [TIMESTAMP] - -next_dagrun_create_after - - [TIMESTAMP] - -next_dagrun_data_interval_end - - [TIMESTAMP] - -next_dagrun_data_interval_start - - [TIMESTAMP] - -owners - - [VARCHAR(2000)] - -pickle_id - - [INTEGER] - -processor_subdir - - [VARCHAR(2000)] - -timetable_description - - [VARCHAR(1000)] - -timetable_summary - - [TEXT] + +dag + +dag_id + + [VARCHAR(250)] + NOT NULL + +asset_expression + + [JSON] + +dag_display_name + + [VARCHAR(2000)] + +default_view + + [VARCHAR(25)] + +description + + [TEXT] + +fileloc + + [VARCHAR(2000)] + +has_import_errors + + [BOOLEAN] + +has_task_concurrency_limits + + [BOOLEAN] + NOT NULL + +is_active + + [BOOLEAN] + +is_paused + + [BOOLEAN] + +last_expired + + [TIMESTAMP] + +last_parsed_time + + [TIMESTAMP] + +max_active_runs + + [INTEGER] + +max_active_tasks + + [INTEGER] + NOT NULL + +max_consecutive_failed_dag_runs + + [INTEGER] + NOT NULL + +next_dagrun + + [TIMESTAMP] + +next_dagrun_create_after + + [TIMESTAMP] + +next_dagrun_data_interval_end + + [TIMESTAMP] + +next_dagrun_data_interval_start + + [TIMESTAMP] + +owners + + [VARCHAR(2000)] + +processor_subdir + + [VARCHAR(2000)] + +timetable_description + + [VARCHAR(1000)] + +timetable_summary + + [TEXT] dag--dag_schedule_asset_alias_reference - -0..N -1 + +0..N +1 dag--dag_schedule_asset_reference - -0..N -1 + +0..N +1 dag--task_outlet_asset_reference - -0..N -1 + +0..N +1 dag--asset_dag_run_queue - -0..N -1 + +0..N +1 - + dag_tag - -dag_tag - -dag_id - - [VARCHAR(250)] - NOT NULL - -name - - [VARCHAR(100)] - NOT NULL + +dag_tag + +dag_id + + [VARCHAR(250)] + NOT NULL + +name + + [VARCHAR(100)] + NOT NULL dag--dag_tag - -0..N -1 + +0..N +1 - + dag_owner_attributes - -dag_owner_attributes - -dag_id - - [VARCHAR(250)] - NOT NULL - -owner - - [VARCHAR(500)] - NOT NULL - -link - - [VARCHAR(500)] - NOT NULL + +dag_owner_attributes + +dag_id + + [VARCHAR(250)] + NOT NULL + +owner + + [VARCHAR(500)] + NOT NULL + +link + + [VARCHAR(500)] + NOT NULL dag--dag_owner_attributes - -0..N -1 + +0..N +1 - + dag_warning - 
-dag_warning - -dag_id - - [VARCHAR(250)] - NOT NULL - -warning_type - - [VARCHAR(50)] - NOT NULL - -message - - [TEXT] - NOT NULL - -timestamp - - [TIMESTAMP] - NOT NULL + +dag_warning + +dag_id + + [VARCHAR(250)] + NOT NULL + +warning_type + + [VARCHAR(50)] + NOT NULL + +message + + [TEXT] + NOT NULL + +timestamp + + [TIMESTAMP] + NOT NULL dag--dag_warning - -0..N -1 + +0..N +1 - + log_template - -log_template - -id - - [INTEGER] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -elasticsearch_id - - [TEXT] - NOT NULL - -filename - - [TEXT] - NOT NULL + +log_template + +id + + [INTEGER] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +elasticsearch_id + + [TEXT] + NOT NULL + +filename + + [TEXT] + NOT NULL - + dag_run - -dag_run - -id - - [INTEGER] - NOT NULL - -backfill_id - - [INTEGER] - -clear_number - - [INTEGER] - NOT NULL - -conf - - [BYTEA] - -creating_job_id - - [INTEGER] - -dag_hash - - [VARCHAR(32)] - -dag_id - - [VARCHAR(250)] - NOT NULL - -data_interval_end - - [TIMESTAMP] - -data_interval_start - - [TIMESTAMP] - -end_date - - [TIMESTAMP] - -external_trigger - - [BOOLEAN] - -last_scheduling_decision - - [TIMESTAMP] - -log_template_id - - [INTEGER] - -logical_date - - [TIMESTAMP] - NOT NULL - -queued_at - - [TIMESTAMP] - -run_id - - [VARCHAR(250)] - NOT NULL - -run_type - - [VARCHAR(50)] - NOT NULL - -start_date - - [TIMESTAMP] - -state - - [VARCHAR(50)] - -triggered_by - - [VARCHAR(50)] - -updated_at - - [TIMESTAMP] + +dag_run + +id + + [INTEGER] + NOT NULL + +backfill_id + + [INTEGER] + +clear_number + + [INTEGER] + NOT NULL + +conf + + [BYTEA] + +creating_job_id + + [INTEGER] + +dag_hash + + [VARCHAR(32)] + +dag_id + + [VARCHAR(250)] + NOT NULL + +data_interval_end + + [TIMESTAMP] + +data_interval_start + + [TIMESTAMP] + +end_date + + [TIMESTAMP] + +external_trigger + + [BOOLEAN] + +last_scheduling_decision + + [TIMESTAMP] + +log_template_id + + [INTEGER] + +logical_date + + [TIMESTAMP] + NOT NULL + +queued_at + + [TIMESTAMP] + +run_id + + [VARCHAR(250)] + NOT NULL + +run_type + + [VARCHAR(50)] + NOT NULL + +start_date + + [TIMESTAMP] + +state + + [VARCHAR(50)] + +triggered_by + + [VARCHAR(50)] + +updated_at + + [TIMESTAMP] log_template--dag_run - -0..N -{0,1} + +0..N +{0,1} dag_run--dagrun_asset_event - -0..N -1 + +0..N +1 - + task_instance - -task_instance - -id - - [UUID] - NOT NULL - -custom_operator_name - - [VARCHAR(1000)] - -dag_id - - [VARCHAR(250)] - NOT NULL - -duration - - [DOUBLE_PRECISION] - -end_date - - [TIMESTAMP] - -executor - - [VARCHAR(1000)] - -executor_config - - [BYTEA] - -external_executor_id - - [VARCHAR(250)] - -hostname - - [VARCHAR(1000)] - -last_heartbeat_at - - [TIMESTAMP] - -map_index - - [INTEGER] - NOT NULL - -max_tries - - [INTEGER] - -next_kwargs - - [JSON] - -next_method - - [VARCHAR(1000)] - -operator - - [VARCHAR(1000)] - -pid - - [INTEGER] - -pool - - [VARCHAR(256)] - NOT NULL - -pool_slots - - [INTEGER] - NOT NULL - -priority_weight - - [INTEGER] - -queue - - [VARCHAR(256)] - -queued_by_job_id - - [INTEGER] - -queued_dttm - - [TIMESTAMP] - -rendered_map_index - - [VARCHAR(250)] - -run_id - - [VARCHAR(250)] - NOT NULL - -start_date - - [TIMESTAMP] - -state - - [VARCHAR(20)] - -task_display_name - - [VARCHAR(2000)] - -task_id - - [VARCHAR(250)] - NOT NULL - -trigger_id - - [INTEGER] - -trigger_timeout - - [TIMESTAMP] - -try_number - - [INTEGER] - -unixname - - [VARCHAR(1000)] - -updated_at - - [TIMESTAMP] + +task_instance + +id + + [UUID] + NOT NULL + +custom_operator_name + + [VARCHAR(1000)] + +dag_id + + [VARCHAR(250)] + NOT 
NULL + +duration + + [DOUBLE_PRECISION] + +end_date + + [TIMESTAMP] + +executor + + [VARCHAR(1000)] + +executor_config + + [BYTEA] + +external_executor_id + + [VARCHAR(250)] + +hostname + + [VARCHAR(1000)] + +last_heartbeat_at + + [TIMESTAMP] + +map_index + + [INTEGER] + NOT NULL + +max_tries + + [INTEGER] + +next_kwargs + + [JSON] + +next_method + + [VARCHAR(1000)] + +operator + + [VARCHAR(1000)] + +pid + + [INTEGER] + +pool + + [VARCHAR(256)] + NOT NULL + +pool_slots + + [INTEGER] + NOT NULL + +priority_weight + + [INTEGER] + +queue + + [VARCHAR(256)] + +queued_by_job_id + + [INTEGER] + +queued_dttm + + [TIMESTAMP] + +rendered_map_index + + [VARCHAR(250)] + +run_id + + [VARCHAR(250)] + NOT NULL + +start_date + + [TIMESTAMP] + +state + + [VARCHAR(20)] + +task_display_name + + [VARCHAR(2000)] + +task_id + + [VARCHAR(250)] + NOT NULL + +trigger_id + + [INTEGER] + +trigger_timeout + + [TIMESTAMP] + +try_number + + [INTEGER] + +unixname + + [VARCHAR(1000)] + +updated_at + + [TIMESTAMP] dag_run--task_instance - -0..N -1 + +0..N +1 dag_run--task_instance - -0..N -1 + +0..N +1 - + backfill_dag_run - -backfill_dag_run - -id - - [INTEGER] - NOT NULL - -backfill_id - - [INTEGER] - NOT NULL - -dag_run_id - - [INTEGER] - -exception_reason - - [VARCHAR(250)] - -logical_date - - [TIMESTAMP] - NOT NULL - -sort_ordinal - - [INTEGER] - NOT NULL + +backfill_dag_run + +id + + [INTEGER] + NOT NULL + +backfill_id + + [INTEGER] + NOT NULL + +dag_run_id + + [INTEGER] + +exception_reason + + [VARCHAR(250)] + +logical_date + + [TIMESTAMP] + NOT NULL + +sort_ordinal + + [INTEGER] + NOT NULL dag_run--backfill_dag_run - -0..N -{0,1} + +0..N +{0,1} - + dag_run_note - -dag_run_note - -dag_run_id - - [INTEGER] - NOT NULL - -content - - [VARCHAR(1000)] - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL - -user_id - - [VARCHAR(128)] + +dag_run_note + +dag_run_id + + [INTEGER] + NOT NULL + +content + + [VARCHAR(1000)] + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + +user_id + + [VARCHAR(128)] dag_run--dag_run_note - -1 -1 + +1 +1 - + task_reschedule - -task_reschedule - -id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -duration - - [INTEGER] - NOT NULL - -end_date - - [TIMESTAMP] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -reschedule_date - - [TIMESTAMP] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -start_date - - [TIMESTAMP] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -try_number - - [INTEGER] - NOT NULL + +task_reschedule + +id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +duration + + [INTEGER] + NOT NULL + +end_date + + [TIMESTAMP] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +reschedule_date + + [TIMESTAMP] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +start_date + + [TIMESTAMP] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +try_number + + [INTEGER] + NOT NULL dag_run--task_reschedule - -0..N -1 + +0..N +1 dag_run--task_reschedule - -0..N -1 + +0..N +1 task_instance--task_reschedule - -0..N -1 + +0..N +1 task_instance--task_reschedule - -0..N -1 + +0..N +1 task_instance--task_reschedule - -0..N -1 + +0..N +1 task_instance--task_reschedule - -0..N -1 + +0..N +1 - + rendered_task_instance_fields - -rendered_task_instance_fields - -dag_id - - [VARCHAR(250)] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -k8s_pod_yaml - - [JSON] - -rendered_fields - - [JSON] - NOT NULL + 
+rendered_task_instance_fields + +dag_id + + [VARCHAR(250)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +k8s_pod_yaml + + [JSON] + +rendered_fields + + [JSON] + NOT NULL task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 task_instance--rendered_task_instance_fields - -0..N -1 + +0..N +1 - + task_map - -task_map - -dag_id - - [VARCHAR(250)] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -keys - - [JSON] - -length - - [INTEGER] - NOT NULL + +task_map + +dag_id + + [VARCHAR(250)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +keys + + [JSON] + +length + + [INTEGER] + NOT NULL task_instance--task_map - -0..N -1 + +0..N +1 task_instance--task_map - -0..N -1 + +0..N +1 task_instance--task_map - -0..N -1 + +0..N +1 task_instance--task_map - -0..N -1 + +0..N +1 - + xcom - -xcom - -dag_run_id - - [INTEGER] - NOT NULL - -key - - [VARCHAR(512)] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -timestamp - - [TIMESTAMP] - NOT NULL - -value - - [BYTEA] + +xcom + +dag_run_id + + [INTEGER] + NOT NULL + +key + + [VARCHAR(512)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +timestamp + + [TIMESTAMP] + NOT NULL + +value + + [BYTEA] task_instance--xcom - -0..N -1 + +0..N +1 task_instance--xcom - -0..N -1 + +0..N +1 task_instance--xcom - -0..N -1 + +0..N +1 task_instance--xcom - -0..N -1 + +0..N +1 - + task_instance_note - -task_instance_note - -dag_id - - [VARCHAR(250)] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -content - - [VARCHAR(1000)] - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL - -user_id - - [VARCHAR(128)] + +task_instance_note + +dag_id + + [VARCHAR(250)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +content + + [VARCHAR(1000)] + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + +user_id + + [VARCHAR(128)] task_instance--task_instance_note - -0..N -1 + +0..N +1 task_instance--task_instance_note - -0..N -1 + +0..N +1 task_instance--task_instance_note - -0..N -1 + +0..N +1 task_instance--task_instance_note - -0..N -1 + +0..N +1 - + task_instance_history - -task_instance_history - -id - - [INTEGER] - NOT NULL - -custom_operator_name - - [VARCHAR(1000)] - -dag_id - - [VARCHAR(250)] - NOT NULL - -duration - - [DOUBLE_PRECISION] - -end_date - - [TIMESTAMP] - -executor - - [VARCHAR(1000)] - -executor_config - - [BYTEA] - -external_executor_id - - [VARCHAR(250)] - -hostname - - [VARCHAR(1000)] - -map_index - - [INTEGER] - NOT NULL - -max_tries - - [INTEGER] - -next_kwargs - - [JSON] - -next_method - - [VARCHAR(1000)] - -operator - - [VARCHAR(1000)] - -pid - - [INTEGER] - -pool - - [VARCHAR(256)] - NOT NULL - -pool_slots - - [INTEGER] - NOT NULL - -priority_weight - - [INTEGER] - -queue - - [VARCHAR(256)] - 
-queued_by_job_id - - [INTEGER] - -queued_dttm - - [TIMESTAMP] - -rendered_map_index - - [VARCHAR(250)] - -run_id - - [VARCHAR(250)] - NOT NULL - -start_date - - [TIMESTAMP] - -state - - [VARCHAR(20)] - -task_display_name - - [VARCHAR(2000)] - -task_id - - [VARCHAR(250)] - NOT NULL - -trigger_id - - [INTEGER] - -trigger_timeout - - [TIMESTAMP] - -try_number - - [INTEGER] - NOT NULL - -unixname - - [VARCHAR(1000)] - -updated_at - - [TIMESTAMP] + +task_instance_history + +id + + [INTEGER] + NOT NULL + +custom_operator_name + + [VARCHAR(1000)] + +dag_id + + [VARCHAR(250)] + NOT NULL + +duration + + [DOUBLE_PRECISION] + +end_date + + [TIMESTAMP] + +executor + + [VARCHAR(1000)] + +executor_config + + [BYTEA] + +external_executor_id + + [VARCHAR(250)] + +hostname + + [VARCHAR(1000)] + +map_index + + [INTEGER] + NOT NULL + +max_tries + + [INTEGER] + +next_kwargs + + [JSON] + +next_method + + [VARCHAR(1000)] + +operator + + [VARCHAR(1000)] + +pid + + [INTEGER] + +pool + + [VARCHAR(256)] + NOT NULL + +pool_slots + + [INTEGER] + NOT NULL + +priority_weight + + [INTEGER] + +queue + + [VARCHAR(256)] + +queued_by_job_id + + [INTEGER] + +queued_dttm + + [TIMESTAMP] + +rendered_map_index + + [VARCHAR(250)] + +run_id + + [VARCHAR(250)] + NOT NULL + +start_date + + [TIMESTAMP] + +state + + [VARCHAR(20)] + +task_display_name + + [VARCHAR(2000)] + +task_id + + [VARCHAR(250)] + NOT NULL + +trigger_id + + [INTEGER] + +trigger_timeout + + [TIMESTAMP] + +try_number + + [INTEGER] + NOT NULL + +unixname + + [VARCHAR(1000)] + +updated_at + + [TIMESTAMP] task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 - + backfill - -backfill - -id - - [INTEGER] - NOT NULL - -completed_at - - [TIMESTAMP] - -created_at - - [TIMESTAMP] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_run_conf - - [JSON] - -from_date - - [TIMESTAMP] - NOT NULL - -is_paused - - [BOOLEAN] - -max_active_runs - - [INTEGER] - NOT NULL - -reprocess_behavior - - [VARCHAR(250)] - NOT NULL - -to_date - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +backfill + +id + + [INTEGER] + NOT NULL + +completed_at + + [TIMESTAMP] + +created_at + + [TIMESTAMP] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_run_conf + + [JSON] + NOT NULL + +from_date + + [TIMESTAMP] + NOT NULL + +is_paused + + [BOOLEAN] + +max_active_runs + + [INTEGER] + NOT NULL + +reprocess_behavior + + [VARCHAR(250)] + NOT NULL + +to_date + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL backfill--dag_run - -0..N -{0,1} + +0..N +{0,1} backfill--backfill_dag_run - -0..N -1 + +0..N +1 - + trigger - -trigger - -id - - [INTEGER] - NOT NULL - -classpath - - [VARCHAR(1000)] - NOT NULL - -created_date - - [TIMESTAMP] - NOT NULL - -kwargs - - [TEXT] - NOT NULL - -triggerer_id - - [INTEGER] + +trigger + +id + + [INTEGER] + NOT NULL + +classpath + + [VARCHAR(1000)] + NOT NULL + +created_date + + [TIMESTAMP] + NOT NULL + +kwargs + + [TEXT] + NOT NULL + +triggerer_id + + [INTEGER] trigger--task_instance - -0..N -{0,1} + +0..N +{0,1} + + + +alembic_version + +alembic_version + +version_num + + [VARCHAR(32)] + NOT NULL session - -session - -id - - [INTEGER] - NOT NULL - -data - - [BYTEA] - -expiry - - [TIMESTAMP] - -session_id - - [VARCHAR(255)] - - - -alembic_version - -alembic_version - -version_num - - [VARCHAR(32)] - NOT NULL + +session + +id + + [INTEGER] + NOT NULL + 
+data + + [BYTEA] + +expiry + + [TIMESTAMP] + +session_id + + [VARCHAR(255)] - + ab_user - -ab_user - -id - - [INTEGER] - NOT NULL - -active - - [BOOLEAN] - -changed_by_fk - - [INTEGER] - -changed_on - - [TIMESTAMP] - -created_by_fk - - [INTEGER] - -created_on - - [TIMESTAMP] - -email - - [VARCHAR(512)] - NOT NULL - -fail_login_count - - [INTEGER] - -first_name - - [VARCHAR(256)] - NOT NULL - -last_login - - [TIMESTAMP] - -last_name - - [VARCHAR(256)] - NOT NULL - -login_count - - [INTEGER] - -password - - [VARCHAR(256)] - -username - - [VARCHAR(512)] - NOT NULL + +ab_user + +id + + [INTEGER] + NOT NULL + +active + + [BOOLEAN] + +changed_by_fk + + [INTEGER] + +changed_on + + [TIMESTAMP] + +created_by_fk + + [INTEGER] + +created_on + + [TIMESTAMP] + +email + + [VARCHAR(512)] + NOT NULL + +fail_login_count + + [INTEGER] + +first_name + + [VARCHAR(256)] + NOT NULL + +last_login + + [TIMESTAMP] + +last_name + + [VARCHAR(256)] + NOT NULL + +login_count + + [INTEGER] + +password + + [VARCHAR(256)] + +username + + [VARCHAR(512)] + NOT NULL ab_user--ab_user - -0..N -{0,1} + +0..N +{0,1} ab_user--ab_user - -0..N -{0,1} + +0..N +{0,1} - + ab_user_role - -ab_user_role - -id - - [INTEGER] - NOT NULL - -role_id - - [INTEGER] - -user_id - - [INTEGER] + +ab_user_role + +id + + [INTEGER] + NOT NULL + +role_id + + [INTEGER] + +user_id + + [INTEGER] ab_user--ab_user_role - -0..N -{0,1} + +0..N +{0,1} - + ab_register_user - -ab_register_user - -id - - [INTEGER] - NOT NULL - -email - - [VARCHAR(512)] - NOT NULL - -first_name - - [VARCHAR(256)] - NOT NULL - -last_name - - [VARCHAR(256)] - NOT NULL - -password - - [VARCHAR(256)] - -registration_date - - [TIMESTAMP] - -registration_hash - - [VARCHAR(256)] - -username - - [VARCHAR(512)] - NOT NULL + +ab_register_user + +id + + [INTEGER] + NOT NULL + +email + + [VARCHAR(512)] + NOT NULL + +first_name + + [VARCHAR(256)] + NOT NULL + +last_name + + [VARCHAR(256)] + NOT NULL + +password + + [VARCHAR(256)] + +registration_date + + [TIMESTAMP] + +registration_hash + + [VARCHAR(256)] + +username + + [VARCHAR(512)] + NOT NULL - + ab_permission - -ab_permission - -id - - [INTEGER] - NOT NULL - -name - - [VARCHAR(100)] - NOT NULL + +ab_permission + +id + + [INTEGER] + NOT NULL + +name + + [VARCHAR(100)] + NOT NULL - + ab_permission_view - -ab_permission_view - -id - - [INTEGER] - NOT NULL - -permission_id - - [INTEGER] - -view_menu_id - - [INTEGER] + +ab_permission_view + +id + + [INTEGER] + NOT NULL + +permission_id + + [INTEGER] + +view_menu_id + + [INTEGER] ab_permission--ab_permission_view - -0..N -{0,1} + +0..N +{0,1} - + ab_permission_view_role - -ab_permission_view_role - -id - - [INTEGER] - NOT NULL - -permission_view_id - - [INTEGER] - -role_id - - [INTEGER] + +ab_permission_view_role + +id + + [INTEGER] + NOT NULL + +permission_view_id + + [INTEGER] + +role_id + + [INTEGER] ab_permission_view--ab_permission_view_role - -0..N -{0,1} + +0..N +{0,1} - + ab_view_menu - -ab_view_menu - -id - - [INTEGER] - NOT NULL - -name - - [VARCHAR(250)] - NOT NULL + +ab_view_menu + +id + + [INTEGER] + NOT NULL + +name + + [VARCHAR(250)] + NOT NULL ab_view_menu--ab_permission_view - -0..N -{0,1} + +0..N +{0,1} - + ab_role - -ab_role - -id - - [INTEGER] - NOT NULL - -name - - [VARCHAR(64)] - NOT NULL + +ab_role + +id + + [INTEGER] + NOT NULL + +name + + [VARCHAR(64)] + NOT NULL ab_role--ab_user_role - -0..N -{0,1} + +0..N +{0,1} ab_role--ab_permission_view_role - -0..N -{0,1} + +0..N +{0,1} - + alembic_version_fab - -alembic_version_fab - -version_num - - [VARCHAR(32)] - NOT NULL + 
+alembic_version_fab + +version_num + + [VARCHAR(32)] + NOT NULL diff --git a/docs/apache-airflow/migrations-ref.rst b/docs/apache-airflow/migrations-ref.rst index f133a67e08ef..61dde39958e2 100644 --- a/docs/apache-airflow/migrations-ref.rst +++ b/docs/apache-airflow/migrations-ref.rst @@ -39,7 +39,9 @@ Here's the list of all the Database Migrations that are executed via when you ru +-------------------------+------------------+-------------------+--------------------------------------------------------------+ | Revision ID | Revises ID | Airflow Version | Description | +=========================+==================+===================+==============================================================+ -| ``d8cd3297971e`` (head) | ``5f57a45b8433`` | ``3.0.0`` | Add last_heartbeat_at directly to TI. | +| ``d03e4a635aa3`` (head) | ``d8cd3297971e`` | ``3.0.0`` | Drop DAG pickling. | ++-------------------------+------------------+-------------------+--------------------------------------------------------------+ +| ``d8cd3297971e`` | ``5f57a45b8433`` | ``3.0.0`` | Add last_heartbeat_at directly to TI. | +-------------------------+------------------+-------------------+--------------------------------------------------------------+ | ``5f57a45b8433`` | ``486ac7936b78`` | ``3.0.0`` | Drop task_fail table. | +-------------------------+------------------+-------------------+--------------------------------------------------------------+ diff --git a/newsfragments/aip-72.significant.rst b/newsfragments/aip-72.significant.rst index 6c4467696170..2baafad7ab8b 100644 --- a/newsfragments/aip-72.significant.rst +++ b/newsfragments/aip-72.significant.rst @@ -13,3 +13,7 @@ As part of this change the following breaking changes have occurred: There were two build in options for this, Standard (the default) which used Fork or a new process as appropriate, and CGroupRunner to launch tasks in a new CGroup (not usable inside docker or Kubernetes). With the move of the execution time code into the TaskSDK we are using this opportunity to reduce complexity for seldom used features. + +- Shipping DAGs via pickle is no longer supported + + This was a feature that was not widely used and was a security risk. It has been removed. 
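
The provider executors changed below keep a small compatibility shim while pickling is removed from core: ``queue_task_instance`` loses its explicit ``pickle_id`` parameter, accepts ``**kwargs`` instead, and discards a stray ``pickle_id`` keyword before delegating whenever the task instance no longer exposes that attribute. A minimal sketch of that pattern, assuming an Airflow-2-era caller that may still pass ``pickle_id`` (``queue_task_instance_compat`` is an illustrative name, not part of the patch, and it uses ``dict.pop`` with a default where the patch itself uses ``del``)::

    from typing import Any


    def queue_task_instance_compat(executor: Any, task_instance: Any, **kwargs: Any) -> None:
        # If the task instance no longer carries pickle-related state (Airflow 3),
        # drop any pickle_id keyword an older caller may still have supplied.
        if not hasattr(task_instance, "pickle_id"):
            kwargs.pop("pickle_id", None)
        # Delegate to the wrapped executor with the remaining keyword arguments.
        executor.queue_task_instance(task_instance=task_instance, **kwargs)
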
diff --git a/providers/src/airflow/providers/celery/executors/celery_kubernetes_executor.py b/providers/src/airflow/providers/celery/executors/celery_kubernetes_executor.py index acd1afcba995..a8c69871ab9c 100644 --- a/providers/src/airflow/providers/celery/executors/celery_kubernetes_executor.py +++ b/providers/src/airflow/providers/celery/executors/celery_kubernetes_executor.py @@ -56,6 +56,7 @@ class CeleryKubernetesExecutor(BaseExecutor): """ supports_ad_hoc_ti_run: bool = True + # TODO: Remove this flag once providers depend on Airflow 3.0 supports_pickling: bool = True supports_sentry: bool = False @@ -159,7 +160,6 @@ def queue_task_instance( self, task_instance: TaskInstance, mark_success: bool = False, - pickle_id: int | None = None, ignore_all_deps: bool = False, ignore_depends_on_past: bool = False, wait_for_past_depends_before_skipping: bool = False, @@ -167,6 +167,7 @@ def queue_task_instance( ignore_ti_state: bool = False, pool: str | None = None, cfg_path: str | None = None, + **kwargs, ) -> None: """Queues task instance via celery or kubernetes executor.""" from airflow.models.taskinstance import SimpleTaskInstance @@ -175,10 +176,14 @@ def queue_task_instance( self.log.debug( "Using executor: %s to queue_task_instance for %s", executor.__class__.__name__, task_instance.key ) + + # TODO: Remove this once providers depend on Airflow 3.0 + if not hasattr(task_instance, "pickle_id"): + del kwargs["pickle_id"] + executor.queue_task_instance( task_instance=task_instance, mark_success=mark_success, - pickle_id=pickle_id, ignore_all_deps=ignore_all_deps, ignore_depends_on_past=ignore_depends_on_past, wait_for_past_depends_before_skipping=wait_for_past_depends_before_skipping, @@ -186,6 +191,7 @@ def queue_task_instance( ignore_ti_state=ignore_ti_state, pool=pool, cfg_path=cfg_path, + **kwargs, ) def get_task_log(self, ti: TaskInstance, try_number: int) -> tuple[list[str], list[str]]: diff --git a/providers/src/airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py b/providers/src/airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py index 63755d3d11a1..d24a59a95d10 100644 --- a/providers/src/airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py +++ b/providers/src/airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py @@ -45,6 +45,7 @@ class LocalKubernetesExecutor(BaseExecutor): """ supports_ad_hoc_ti_run: bool = True + # TODO: Remove this attribute once providers rely on Airflow >=3.0.0 supports_pickling: bool = False supports_sentry: bool = False @@ -146,7 +147,6 @@ def queue_task_instance( self, task_instance: TaskInstance, mark_success: bool = False, - pickle_id: int | None = None, ignore_all_deps: bool = False, ignore_depends_on_past: bool = False, wait_for_past_depends_before_skipping: bool = False, @@ -154,6 +154,7 @@ def queue_task_instance( ignore_ti_state: bool = False, pool: str | None = None, cfg_path: str | None = None, + **kwargs, ) -> None: """Queues task instance via local or kubernetes executor.""" from airflow.models.taskinstance import SimpleTaskInstance @@ -162,10 +163,13 @@ def queue_task_instance( self.log.debug( "Using executor: %s to queue_task_instance for %s", executor.__class__.__name__, task_instance.key ) + + if not hasattr(task_instance, "pickle_id"): + del kwargs["pickle_id"] + executor.queue_task_instance( task_instance=task_instance, mark_success=mark_success, - pickle_id=pickle_id, ignore_all_deps=ignore_all_deps, ignore_depends_on_past=ignore_depends_on_past, 
wait_for_past_depends_before_skipping=wait_for_past_depends_before_skipping, @@ -173,6 +177,7 @@ def queue_task_instance( ignore_ti_state=ignore_ti_state, pool=pool, cfg_path=cfg_path, + **kwargs, ) def get_task_log(self, ti: TaskInstance, try_number: int) -> tuple[list[str], list[str]]: diff --git a/providers/tests/celery/executors/test_celery_executor.py b/providers/tests/celery/executors/test_celery_executor.py index 71fae6691c6f..2fa72deab0aa 100644 --- a/providers/tests/celery/executors/test_celery_executor.py +++ b/providers/tests/celery/executors/test_celery_executor.py @@ -110,9 +110,6 @@ def teardown_method(self) -> None: db.clear_db_runs() db.clear_db_jobs() - def test_supports_pickling(self): - assert CeleryExecutor.supports_pickling - def test_supports_sentry(self): assert CeleryExecutor.supports_sentry diff --git a/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py b/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py index 13ca0ed828c6..ea143edd8298 100644 --- a/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py +++ b/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py @@ -1750,9 +1750,6 @@ def test_get_task_log(self, mock_get_kube_client, create_task_instance_of_operat "Reading from k8s pod logs failed: error_fetching_pod_log", ] - def test_supports_pickling(self): - assert KubernetesExecutor.supports_pickling - def test_supports_sentry(self): assert not KubernetesExecutor.supports_sentry diff --git a/tests/api_connexion/endpoints/test_dag_endpoint.py b/tests/api_connexion/endpoints/test_dag_endpoint.py index 5249944ea113..1cd014ccb0cf 100644 --- a/tests/api_connexion/endpoints/test_dag_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_endpoint.py @@ -185,7 +185,6 @@ def test_should_respond_200(self): "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -224,7 +223,6 @@ def test_should_respond_200_with_schedule_none(self, session): "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -311,7 +309,6 @@ def test_should_respond_200(self, url_safe_serializer): "last_expired": None, "last_parsed": last_parsed, "last_parsed_time": None, - "last_pickled": None, "max_active_runs": 16, "max_active_tasks": 16, "max_consecutive_failed_dag_runs": 0, @@ -372,7 +369,6 @@ def test_should_respond_200_with_asset_expression(self, url_safe_serializer): "last_expired": None, "last_parsed": last_parsed, "last_parsed_time": None, - "last_pickled": None, "max_active_runs": 16, "max_consecutive_failed_dag_runs": 0, "max_active_tasks": 16, @@ -428,7 +424,6 @@ def test_should_response_200_with_doc_md_none(self, url_safe_serializer): "last_expired": None, "last_parsed": last_parsed, "last_parsed_time": None, - "last_pickled": None, "max_active_runs": 16, "max_consecutive_failed_dag_runs": 0, "max_active_tasks": 16, @@ -477,7 +472,6 @@ def test_should_response_200_for_null_start_date(self, url_safe_serializer): "last_expired": None, "last_parsed": last_parsed, "last_parsed_time": None, - "last_pickled": None, "max_active_runs": 16, "max_consecutive_failed_dag_runs": 0, "max_active_tasks": 16, @@ -528,7 +522,6 @@ def test_should_respond_200_serialized(self, url_safe_serializer): "is_paused_upon_creation": None, "last_expired": None, "last_parsed_time": None, - 
"last_pickled": None, "max_active_runs": 16, "max_consecutive_failed_dag_runs": 0, "max_active_tasks": 16, @@ -587,7 +580,6 @@ def test_should_respond_200_serialized(self, url_safe_serializer): "is_paused_upon_creation": None, "last_expired": None, "last_parsed_time": None, - "last_pickled": None, "max_active_runs": 16, "max_consecutive_failed_dag_runs": 0, "max_active_tasks": 16, @@ -699,7 +691,6 @@ def test_should_respond_200(self, session, url_safe_serializer): "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -725,7 +716,6 @@ def test_should_respond_200(self, session, url_safe_serializer): "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -763,7 +753,6 @@ def test_only_active_true_returns_active_dags(self, url_safe_serializer): "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -802,7 +791,6 @@ def test_only_active_false_returns_all_dags(self, url_safe_serializer): "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -828,7 +816,6 @@ def test_only_active_false_returns_all_dags(self, url_safe_serializer): "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -983,7 +970,6 @@ def test_paused_true_returns_paused_dags(self, url_safe_serializer): "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -1021,7 +1007,6 @@ def test_paused_false_returns_unpaused_dags(self, url_safe_serializer): "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -1059,7 +1044,6 @@ def test_paused_none_returns_all_dags(self, url_safe_serializer): "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -1085,7 +1069,6 @@ def test_paused_none_returns_all_dags(self, url_safe_serializer): "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -1173,7 +1156,6 @@ def test_should_respond_200_on_patch_is_paused(self, url_safe_serializer, sessio "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -1270,7 +1252,6 @@ def test_should_respond_200_with_update_mask(self, url_safe_serializer): "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -1363,7 +1344,6 @@ def test_should_respond_200_on_patch_is_paused(self, session, url_safe_serialize "next_dagrun_create_after": None, "last_expired": 
None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -1389,7 +1369,6 @@ def test_should_respond_200_on_patch_is_paused(self, session, url_safe_serialize "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -1440,7 +1419,6 @@ def test_should_respond_200_on_patch_is_paused_using_update_mask(self, session, "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -1466,7 +1444,6 @@ def test_should_respond_200_on_patch_is_paused_using_update_mask(self, session, "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -1557,7 +1534,6 @@ def test_only_active_true_returns_active_dags(self, url_safe_serializer, session "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -1604,7 +1580,6 @@ def test_only_active_false_returns_all_dags(self, url_safe_serializer, session): "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -1630,7 +1605,6 @@ def test_only_active_false_returns_all_dags(self, url_safe_serializer, session): "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -1827,7 +1801,6 @@ def test_should_respond_200_and_pause_dags(self, url_safe_serializer): "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -1853,7 +1826,6 @@ def test_should_respond_200_and_pause_dags(self, url_safe_serializer): "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -1900,7 +1872,6 @@ def test_should_respond_200_and_pause_dag_pattern(self, session, url_safe_serial "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -1926,7 +1897,6 @@ def test_should_respond_200_and_pause_dag_pattern(self, session, url_safe_serial "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -1975,7 +1945,6 @@ def test_should_respond_200_and_reverse_ordering(self, session, url_safe_seriali "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -2001,7 +1970,6 @@ def test_should_respond_200_and_reverse_ordering(self, session, url_safe_seriali "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, diff --git 
a/tests/api_connexion/schemas/test_dag_schema.py b/tests/api_connexion/schemas/test_dag_schema.py index 4a6829a5c831..a14365f07c1e 100644 --- a/tests/api_connexion/schemas/test_dag_schema.py +++ b/tests/api_connexion/schemas/test_dag_schema.py @@ -67,7 +67,6 @@ def test_serialize_test_dag_schema(url_safe_serializer): "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -102,7 +101,6 @@ def test_serialize_test_dag_collection_schema(url_safe_serializer): "last_expired": None, "max_active_tasks": 16, "max_consecutive_failed_dag_runs": 0, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, @@ -128,7 +126,6 @@ def test_serialize_test_dag_collection_schema(url_safe_serializer): "last_expired": None, "max_active_tasks": 16, "max_consecutive_failed_dag_runs": 0, - "last_pickled": None, "default_view": None, "last_parsed_time": None, "timetable_description": None, diff --git a/tests/api_fastapi/core_api/routes/public/test_dags.py b/tests/api_fastapi/core_api/routes/public/test_dags.py index 0e9b7a408583..f913fd36e4bb 100644 --- a/tests/api_fastapi/core_api/routes/public/test_dags.py +++ b/tests/api_fastapi/core_api/routes/public/test_dags.py @@ -312,7 +312,6 @@ def test_dag_details( "last_expired": None, "last_parsed": last_parsed, "last_parsed_time": last_parsed_time, - "last_pickled": None, "max_active_runs": 16, "max_active_tasks": 16, "max_consecutive_failed_dag_runs": 0, @@ -329,7 +328,6 @@ def test_dag_details( "value": 1, } }, - "pickle_id": None, "render_template_as_native_obj": False, "timetable_summary": None, "start_date": start_date.replace(tzinfo=None).isoformat() + "Z", # pydantic datetime format @@ -381,12 +379,10 @@ def test_get_dag(self, test_client, query_params, dag_id, expected_status_code, "next_dagrun_create_after": None, "last_expired": None, "max_active_tasks": 16, - "last_pickled": None, "default_view": "grid", "last_parsed_time": last_parsed_time, "timetable_description": "Never, external triggers only", "has_import_errors": False, - "pickle_id": None, } assert res_json == expected diff --git a/tests/cli/commands/test_task_command.py b/tests/cli/commands/test_task_command.py index 5a4e0b279242..ed1a2c28754f 100644 --- a/tests/cli/commands/test_task_command.py +++ b/tests/cli/commands/test_task_command.py @@ -21,7 +21,6 @@ import json import logging import os -import re import shutil import sys from argparse import ArgumentParser @@ -288,7 +287,6 @@ def test_run_with_existing_dag_run_id(self, mock_local_job_runner): wait_for_past_depends_before_skipping=False, ignore_task_deps=False, ignore_ti_state=False, - pickle_id=None, pool=None, external_executor_id=None, ) @@ -323,7 +321,6 @@ def test_run_with_read_from_db(self, mock_local_job_runner, caplog, from_db): wait_for_past_depends_before_skipping=False, ignore_task_deps=False, ignore_ti_state=False, - pickle_id=None, pool=None, external_executor_id=None, ) @@ -606,31 +603,6 @@ def test_task_render_with_custom_timetable(self, mock_dagrun, mock_scalars, mock ) assert "data_interval" in mock_dagrun.call_args.kwargs - def test_cli_run_when_pickle_and_dag_cli_method_selected(self): - """ - tasks run should return an AirflowException when invalid pickle_id is passed - """ - pickle_id = "pickle_id" - - with pytest.raises( - AirflowException, - match=re.escape("You cannot use the --pickle option when using DAG.cli() method."), - ): - 
task_command.task_run( - self.parser.parse_args( - [ - "tasks", - "run", - "example_bash_operator", - "runme_0", - DEFAULT_DATE.isoformat(), - "--pickle", - pickle_id, - ] - ), - self.dag, - ) - def test_task_state(self): task_command.task_state( self.parser.parse_args( @@ -784,7 +756,6 @@ def test_external_executor_id_present_for_fork_run_task(self, mock_local_job): job=mock.ANY, task_instance=mock.ANY, mark_success=False, - pickle_id=None, ignore_all_deps=False, ignore_depends_on_past=False, wait_for_past_depends_before_skipping=False, @@ -806,7 +777,6 @@ def test_external_executor_id_present_for_process_run_task(self, mock_local_job) job=mock.ANY, task_instance=mock.ANY, mark_success=False, - pickle_id=None, ignore_all_deps=False, ignore_depends_on_past=False, wait_for_past_depends_before_skipping=False, diff --git a/tests/dag_processing/test_job_runner.py b/tests/dag_processing/test_job_runner.py index 891223e2cd67..192a12358e8d 100644 --- a/tests/dag_processing/test_job_runner.py +++ b/tests/dag_processing/test_job_runner.py @@ -81,8 +81,8 @@ class FakeDagFileProcessorRunner(DagFileProcessorProcess): # This fake processor will return the zombies it received in constructor # as its processing result w/o actually parsing anything. - def __init__(self, file_path, pickle_dags, dag_ids, dag_directory, callbacks): - super().__init__(file_path, pickle_dags, dag_ids, dag_directory, callbacks) + def __init__(self, file_path, dag_ids, dag_directory, callbacks): + super().__init__(file_path, dag_ids, dag_directory, callbacks) # We need a "real" selectable handle for waitable_handle to work readable, writable = multiprocessing.Pipe(duplex=False) writable.send("abc") @@ -110,10 +110,9 @@ def result(self): return self._result @staticmethod - def _create_process(file_path, callback_requests, dag_ids, dag_directory, pickle_dags): + def _create_process(file_path, callback_requests, dag_ids, dag_directory): return FakeDagFileProcessorRunner( file_path, - pickle_dags, dag_ids, dag_directory, callback_requests, @@ -179,7 +178,6 @@ def test_remove_file_clears_import_error(self, tmp_path): processor_timeout=timedelta(days=365), signal_conn=child_pipe, dag_ids=[], - pickle_dags=False, async_mode=async_mode, ), ) @@ -215,7 +213,6 @@ def test_max_runs_when_no_files(self, tmp_path): processor_timeout=timedelta(days=365), signal_conn=child_pipe, dag_ids=[], - pickle_dags=False, async_mode=async_mode, ), ) @@ -239,7 +236,6 @@ def test_start_new_processes_with_same_filepath(self, _): processor_timeout=timedelta(days=365), signal_conn=MagicMock(), dag_ids=[], - pickle_dags=False, async_mode=True, ), ) @@ -273,7 +269,6 @@ def test_set_file_paths_when_processor_file_path_not_in_new_file_paths(self): processor_timeout=timedelta(days=365), signal_conn=MagicMock(), dag_ids=[], - pickle_dags=False, async_mode=True, ), ) @@ -298,7 +293,6 @@ def test_set_file_paths_when_processor_file_path_is_in_new_file_paths(self): processor_timeout=timedelta(days=365), signal_conn=MagicMock(), dag_ids=[], - pickle_dags=False, async_mode=True, ), ) @@ -332,7 +326,6 @@ def test_file_paths_in_queue_sorted_alphabetically( processor_timeout=timedelta(days=365), signal_conn=MagicMock(), dag_ids=[], - pickle_dags=False, async_mode=True, ), ) @@ -364,7 +357,6 @@ def test_file_paths_in_queue_sorted_random_seeded_by_host( processor_timeout=timedelta(days=365), signal_conn=MagicMock(), dag_ids=[], - pickle_dags=False, async_mode=True, ), ) @@ -429,7 +421,6 @@ def test_file_paths_in_queue_sorted_by_modified_time( 
processor_timeout=timedelta(days=365), signal_conn=MagicMock(), dag_ids=[], - pickle_dags=False, async_mode=True, ), ) @@ -469,7 +460,6 @@ def test_file_paths_in_queue_excludes_missing_file( processor_timeout=timedelta(days=365), signal_conn=MagicMock(), dag_ids=[], - pickle_dags=False, async_mode=True, ), ) @@ -506,7 +496,6 @@ def test_add_new_file_to_parsing_queue( processor_timeout=timedelta(days=365), signal_conn=MagicMock(), dag_ids=[], - pickle_dags=False, async_mode=True, ), ) @@ -554,7 +543,6 @@ def test_recently_modified_file_is_parsed_with_mtime_mode( processor_timeout=timedelta(days=365), signal_conn=MagicMock(), dag_ids=[], - pickle_dags=False, async_mode=True, ), ) @@ -615,7 +603,6 @@ def test_file_paths_in_queue_sorted_by_priority( processor_timeout=timedelta(days=365), signal_conn=MagicMock(), dag_ids=[], - pickle_dags=False, async_mode=True, ), ) @@ -644,7 +631,6 @@ def test_scan_stale_dags(self): processor_timeout=timedelta(minutes=10), signal_conn=MagicMock(), dag_ids=[], - pickle_dags=False, async_mode=True, ), ) @@ -722,7 +708,6 @@ def test_scan_stale_dags_standalone_mode(self): processor_timeout=timedelta(minutes=10), signal_conn=MagicMock(), dag_ids=[], - pickle_dags=False, async_mode=True, ), ) @@ -779,14 +764,12 @@ def test_kill_timed_out_processors_kill(self, mock_kill, mock_pid, mock_waitable processor_timeout=timedelta(seconds=5), signal_conn=MagicMock(), dag_ids=[], - pickle_dags=False, async_mode=True, ), ) processor = DagFileProcessorProcess( file_path="abc.txt", - pickle_dags=False, dag_ids=[], dag_directory=TEST_DAG_FOLDER, callback_requests=[], @@ -812,14 +795,12 @@ def test_kill_timed_out_processors_no_kill(self, mock_dag_file_processor, mock_p processor_timeout=timedelta(seconds=5), signal_conn=MagicMock(), dag_ids=[], - pickle_dags=False, async_mode=True, ), ) processor = DagFileProcessorProcess( file_path="abc.txt", - pickle_dags=False, dag_ids=[], dag_directory=str(TEST_DAG_FOLDER), callback_requests=[], @@ -854,7 +835,6 @@ def test_dag_with_system_exit(self): max_runs=1, processor_timeout=timedelta(seconds=5), signal_conn=child_pipe, - pickle_dags=False, async_mode=True, ), ) @@ -901,7 +881,6 @@ def test_import_error_with_dag_directory(self, tmp_path): max_runs=1, signal_conn=child_pipe, processor_timeout=timedelta(seconds=5), - pickle_dags=False, async_mode=False, ), ) @@ -922,7 +901,6 @@ def test_import_error_with_dag_directory(self, tmp_path): max_runs=1, signal_conn=child_pipe, processor_timeout=timedelta(seconds=5), - pickle_dags=False, async_mode=True, ), ) @@ -992,7 +970,6 @@ def fake_processor_(*args, **kwargs): max_runs=100, processor_timeout=timedelta(seconds=5), signal_conn=child_pipe, - pickle_dags=False, async_mode=True, ) @@ -1034,7 +1011,6 @@ def test_send_file_processing_statsd_timing(self, statsd_timing_mock, tmp_path): processor_timeout=timedelta(days=365), signal_conn=child_pipe, dag_ids=[], - pickle_dags=False, async_mode=async_mode, ), ) @@ -1068,7 +1044,6 @@ def test_refresh_dags_dir_doesnt_delete_zipped_dags(self, tmp_path): processor_timeout=timedelta(days=365), signal_conn=MagicMock(), dag_ids=[], - pickle_dags=False, async_mode=True, ), ) @@ -1098,7 +1073,6 @@ def test_refresh_dags_dir_deactivates_deleted_zipped_dags(self, tmp_path): processor_timeout=timedelta(days=365), signal_conn=MagicMock(), dag_ids=[], - pickle_dags=False, async_mode=True, ), ) @@ -1144,7 +1118,6 @@ def test_refresh_dags_dir_does_not_interfer_with_dags_outside_its_subdir(self, t processor_timeout=timedelta(days=365), signal_conn=MagicMock(), dag_ids=[], 
- pickle_dags=False, async_mode=True, ), ) @@ -1194,7 +1167,6 @@ def test_fetch_callbacks_from_database(self, tmp_path): processor_timeout=timedelta(days=365), signal_conn=child_pipe, dag_ids=[], - pickle_dags=False, async_mode=False, ), ) @@ -1241,7 +1213,6 @@ def test_fetch_callbacks_for_current_dag_directory_only(self, tmp_path): processor_timeout=timedelta(days=365), signal_conn=child_pipe, dag_ids=[], - pickle_dags=False, async_mode=False, ), ) @@ -1281,7 +1252,6 @@ def test_fetch_callbacks_from_database_max_per_loop(self, tmp_path): processor_timeout=timedelta(days=365), signal_conn=child_pipe, dag_ids=[], - pickle_dags=False, async_mode=False, ), ) @@ -1322,7 +1292,6 @@ def test_fetch_callbacks_from_database_not_standalone(self, tmp_path): processor_timeout=timedelta(days=365), signal_conn=child_pipe, dag_ids=[], - pickle_dags=False, async_mode=False, ), ) @@ -1345,7 +1314,6 @@ def test_callback_queue(self, tmp_path): processor_timeout=timedelta(days=365), signal_conn=MagicMock(), dag_ids=[], - pickle_dags=False, async_mode=True, ), ) @@ -1446,9 +1414,7 @@ class path, thus when reloading logging module the airflow.processor_manager os.remove(log_file_loc) # Starting dag processing with 0 max_runs to avoid redundant operations. - processor_agent = DagFileProcessorAgent( - test_dag_path, 0, timedelta(days=365), [], False, async_mode - ) + processor_agent = DagFileProcessorAgent(test_dag_path, 0, timedelta(days=365), [], async_mode) processor_agent.start() if not async_mode: processor_agent.run_single_parsing_loop() @@ -1467,7 +1433,7 @@ def test_parse_once(self): test_dag_path = TEST_DAG_FOLDER / "test_scheduler_dags.py" async_mode = "sqlite" not in conf.get("database", "sql_alchemy_conn") - processor_agent = DagFileProcessorAgent(test_dag_path, 1, timedelta(days=365), [], False, async_mode) + processor_agent = DagFileProcessorAgent(test_dag_path, 1, timedelta(days=365), [], async_mode) processor_agent.start() if not async_mode: processor_agent.run_single_parsing_loop() @@ -1495,7 +1461,7 @@ def test_launch_process(self): os.remove(log_file_loc) # Starting dag processing with 0 max_runs to avoid redundant operations. 
- processor_agent = DagFileProcessorAgent(test_dag_path, 0, timedelta(days=365), [], False, async_mode) + processor_agent = DagFileProcessorAgent(test_dag_path, 0, timedelta(days=365), [], async_mode) processor_agent.start() if not async_mode: processor_agent.run_single_parsing_loop() @@ -1505,21 +1471,21 @@ def test_launch_process(self): assert os.path.isfile(log_file_loc) def test_single_parsing_loop_no_parent_signal_conn(self): - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) processor_agent._process = Mock() processor_agent._parent_signal_conn = None with pytest.raises(ValueError, match="Process not started"): processor_agent.run_single_parsing_loop() def test_single_parsing_loop_no_process(self): - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) processor_agent._parent_signal_conn = Mock() processor_agent._process = None with pytest.raises(ValueError, match="Process not started"): processor_agent.run_single_parsing_loop() def test_single_parsing_loop_process_isnt_alive(self): - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) processor_agent._process = Mock() processor_agent._parent_signal_conn = Mock() processor_agent._process.is_alive.return_value = False @@ -1527,7 +1493,7 @@ def test_single_parsing_loop_process_isnt_alive(self): assert not ret_val def test_single_parsing_loop_process_conn_error(self): - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) processor_agent._process = Mock() processor_agent._parent_signal_conn = Mock() processor_agent._process.is_alive.return_value = True @@ -1536,25 +1502,25 @@ def test_single_parsing_loop_process_conn_error(self): assert not ret_val def test_get_callbacks_pipe(self): - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) processor_agent._parent_signal_conn = Mock() retval = processor_agent.get_callbacks_pipe() assert retval == processor_agent._parent_signal_conn def test_get_callbacks_pipe_no_parent_signal_conn(self): - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) processor_agent._parent_signal_conn = None with pytest.raises(ValueError, match="Process not started"): processor_agent.get_callbacks_pipe() def test_wait_until_finished_no_parent_signal_conn(self): - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) processor_agent._parent_signal_conn = None with pytest.raises(ValueError, match="Process not started"): processor_agent.wait_until_finished() def test_wait_until_finished_poll_eof_error(self): - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) processor_agent._parent_signal_conn = Mock() processor_agent._parent_signal_conn.poll.return_value = True processor_agent._parent_signal_conn.recv = Mock() @@ 
-1563,13 +1529,13 @@ def test_wait_until_finished_poll_eof_error(self): assert ret_val is None def test_heartbeat_no_parent_signal_conn(self): - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) processor_agent._parent_signal_conn = None with pytest.raises(ValueError, match="Process not started"): processor_agent.heartbeat() def test_heartbeat_poll_eof_error(self): - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) processor_agent._parent_signal_conn = Mock() processor_agent._parent_signal_conn.poll.return_value = True processor_agent._parent_signal_conn.recv = Mock() @@ -1578,7 +1544,7 @@ def test_heartbeat_poll_eof_error(self): assert ret_val is None def test_heartbeat_poll_connection_error(self): - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) processor_agent._parent_signal_conn = Mock() processor_agent._parent_signal_conn.poll.return_value = True processor_agent._parent_signal_conn.recv = Mock() @@ -1587,7 +1553,7 @@ def test_heartbeat_poll_connection_error(self): assert ret_val is None def test_heartbeat_poll_process_message(self): - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) processor_agent._parent_signal_conn = Mock() processor_agent._parent_signal_conn.poll.side_effect = [True, False] processor_agent._parent_signal_conn.recv = Mock() @@ -1598,19 +1564,19 @@ def test_heartbeat_poll_process_message(self): def test_process_message_invalid_type(self): message = "xyz" - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) with pytest.raises(RuntimeError, match="Unexpected message received of type str"): processor_agent._process_message(message) def test_heartbeat_manager(self): - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) processor_agent._parent_signal_conn = None with pytest.raises(ValueError, match="Process not started"): processor_agent._heartbeat_manager() @mock.patch("airflow.utils.process_utils.reap_process_group") def test_heartbeat_manager_process_restart(self, mock_pg): - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) processor_agent._parent_signal_conn = Mock() processor_agent._process = MagicMock() processor_agent.start = Mock() @@ -1624,7 +1590,7 @@ def test_heartbeat_manager_process_restart(self, mock_pg): @mock.patch("time.monotonic") @mock.patch("airflow.dag_processing.manager.reap_process_group") def test_heartbeat_manager_process_reap(self, mock_pg, mock_time_monotonic, mock_stats): - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) processor_agent._parent_signal_conn = Mock() processor_agent._process = Mock() processor_agent._process.pid = 12345 @@ -1645,7 +1611,7 @@ def test_heartbeat_manager_process_reap(self, mock_pg, 
mock_time_monotonic, mock processor_agent.start.assert_called() def test_heartbeat_manager_terminate(self): - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) processor_agent._parent_signal_conn = Mock() processor_agent._process = Mock() processor_agent._process.is_alive.return_value = True @@ -1655,7 +1621,7 @@ def test_heartbeat_manager_terminate(self): processor_agent._parent_signal_conn.send.assert_called_with(DagParsingSignal.TERMINATE_MANAGER) def test_heartbeat_manager_terminate_conn_err(self): - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) processor_agent._process = Mock() processor_agent._process.is_alive.return_value = True processor_agent._parent_signal_conn = Mock() @@ -1666,7 +1632,7 @@ def test_heartbeat_manager_terminate_conn_err(self): processor_agent._parent_signal_conn.send.assert_called_with(DagParsingSignal.TERMINATE_MANAGER) def test_heartbeat_manager_end_no_process(self): - processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False, False) + processor_agent = DagFileProcessorAgent("", 1, timedelta(days=365), [], False) processor_agent._process = Mock() processor_agent._process.__bool__ = Mock(return_value=False) processor_agent._process.side_effect = [None] @@ -1682,7 +1648,7 @@ def test_log_to_stdout(self, capfd): async_mode = "sqlite" not in conf.get("database", "sql_alchemy_conn") # Starting dag processing with 0 max_runs to avoid redundant operations. - processor_agent = DagFileProcessorAgent(test_dag_path, 0, timedelta(days=365), [], False, async_mode) + processor_agent = DagFileProcessorAgent(test_dag_path, 0, timedelta(days=365), [], async_mode) processor_agent.start() if not async_mode: processor_agent.run_single_parsing_loop() @@ -1701,7 +1667,7 @@ def test_not_log_to_stdout(self, capfd): async_mode = "sqlite" not in conf.get("database", "sql_alchemy_conn") # Starting dag processing with 0 max_runs to avoid redundant operations. 
- processor_agent = DagFileProcessorAgent(test_dag_path, 0, timedelta(days=365), [], False, async_mode) + processor_agent = DagFileProcessorAgent(test_dag_path, 0, timedelta(days=365), [], async_mode) processor_agent.start() if not async_mode: processor_agent.run_single_parsing_loop() diff --git a/tests/dag_processing/test_processor.py b/tests/dag_processing/test_processor.py index 439c1123f995..f117b3ffe458 100644 --- a/tests/dag_processing/test_processor.py +++ b/tests/dag_processing/test_processor.py @@ -112,7 +112,7 @@ def _process_file(self, file_path, dag_directory, session): dag_ids=[], dag_directory=str(dag_directory), log=mock.MagicMock() ) - dag_file_processor.process_file(file_path, [], False) + dag_file_processor.process_file(file_path, []) @pytest.mark.skip_if_database_isolation_mode # Test is broken in db isolation mode @patch.object(TaskInstance, "handle_failure") @@ -594,7 +594,6 @@ def test_import_error_tracebacks_zip_depth(self, tmp_path): def test_dag_parser_output_when_logging_to_stdout(self, mock_redirect_stdout_for_file): processor = DagFileProcessorProcess( file_path="abc.txt", - pickle_dags=False, dag_ids=[], dag_directory=[], callback_requests=[], @@ -603,7 +602,6 @@ def test_dag_parser_output_when_logging_to_stdout(self, mock_redirect_stdout_for result_channel=MagicMock(), parent_channel=MagicMock(), file_path="fake_file_path", - pickle_dags=False, dag_ids=[], thread_name="fake_thread_name", callback_requests=[], @@ -618,7 +616,6 @@ def test_dag_parser_output_when_logging_to_stdout(self, mock_redirect_stdout_for def test_dag_parser_output_when_logging_to_file(self, mock_redirect_stdout_for_file): processor = DagFileProcessorProcess( file_path="abc.txt", - pickle_dags=False, dag_ids=[], dag_directory=[], callback_requests=[], @@ -627,7 +624,6 @@ def test_dag_parser_output_when_logging_to_file(self, mock_redirect_stdout_for_f result_channel=MagicMock(), parent_channel=MagicMock(), file_path="fake_file_path", - pickle_dags=False, dag_ids=[], thread_name="fake_thread_name", callback_requests=[], @@ -645,7 +641,6 @@ def test_no_valueerror_with_parseable_dag_in_zip(self, mock_context, tmp_path): processor = DagFileProcessorProcess( file_path=zip_filename, - pickle_dags=False, dag_ids=[], dag_directory=[], callback_requests=[], @@ -662,7 +657,6 @@ def test_nullbyte_exception_handling_when_preimporting_airflow(self, mock_contex processor = DagFileProcessorProcess( file_path=dag_filename, - pickle_dags=False, dag_ids=[], dag_directory=[], callback_requests=[], @@ -696,7 +690,6 @@ def test_error_when_waiting_in_async_mode(self, tmp_path): max_runs=1, processor_timeout=datetime.timedelta(1), dag_ids=[], - pickle_dags=False, async_mode=True, ) self.processor_agent.start() @@ -709,7 +702,6 @@ def test_default_multiprocessing_behaviour(self, tmp_path): max_runs=1, processor_timeout=datetime.timedelta(1), dag_ids=[], - pickle_dags=False, async_mode=False, ) self.processor_agent.start() @@ -723,7 +715,6 @@ def test_spawn_multiprocessing_behaviour(self, tmp_path): max_runs=1, processor_timeout=datetime.timedelta(1), dag_ids=[], - pickle_dags=False, async_mode=False, ) self.processor_agent.start() diff --git a/tests/executors/test_base_executor.py b/tests/executors/test_base_executor.py index da7422737ac4..be3ad517d70c 100644 --- a/tests/executors/test_base_executor.py +++ b/tests/executors/test_base_executor.py @@ -44,10 +44,6 @@ def test_supports_sentry(): assert not BaseExecutor.supports_sentry -def test_supports_pickling(): - assert BaseExecutor.supports_pickling - - def 
test_is_local_default_value(): assert not BaseExecutor.is_local diff --git a/tests/executors/test_local_executor.py b/tests/executors/test_local_executor.py index 9443f0395fb1..7bd4fbec203b 100644 --- a/tests/executors/test_local_executor.py +++ b/tests/executors/test_local_executor.py @@ -34,9 +34,6 @@ class TestLocalExecutor: TEST_SUCCESS_COMMANDS = 5 - def test_supports_pickling(self): - assert not LocalExecutor.supports_pickling - def test_supports_sentry(self): assert not LocalExecutor.supports_sentry diff --git a/tests/executors/test_sequential_executor.py b/tests/executors/test_sequential_executor.py index 54e2a9170316..f6cb7aae575b 100644 --- a/tests/executors/test_sequential_executor.py +++ b/tests/executors/test_sequential_executor.py @@ -23,9 +23,6 @@ class TestSequentialExecutor: - def test_supports_pickling(self): - assert not SequentialExecutor.supports_pickling - def test_supports_sentry(self): assert not SequentialExecutor.supports_sentry diff --git a/tests/listeners/test_dag_import_error_listener.py b/tests/listeners/test_dag_import_error_listener.py index 5709ba19a8de..cae92af19844 100644 --- a/tests/listeners/test_dag_import_error_listener.py +++ b/tests/listeners/test_dag_import_error_listener.py @@ -99,7 +99,7 @@ def _process_file(self, file_path, dag_directory, session): dag_ids=[], dag_directory=str(dag_directory), log=mock.MagicMock() ) - dag_file_processor.process_file(file_path, [], False) + dag_file_processor.process_file(file_path, []) @pytest.mark.skip_if_database_isolation_mode # Test is broken in db isolation mode def test_newly_added_import_error(self, tmp_path, session): diff --git a/tests/models/test_dag.py b/tests/models/test_dag.py index 218f635ad91d..e38beb2110ca 100644 --- a/tests/models/test_dag.py +++ b/tests/models/test_dag.py @@ -1308,13 +1308,6 @@ def test_fractional_seconds(self): assert start_date == run.start_date, "dag run start_date loses precision " self._clean_up(dag_id) - def test_pickling(self): - test_dag_id = "test_pickling" - args = {"owner": "airflow", "start_date": DEFAULT_DATE} - dag = DAG(test_dag_id, schedule=None, default_args=args) - dag_pickle = dag.pickle() - assert dag_pickle.pickle.dag_id == dag.dag_id - def test_rich_comparison_ops(self): test_dag_id = "test_rich_comparison_ops" diff --git a/tests/utils/test_cli_util.py b/tests/utils/test_cli_util.py index ba018cdad36d..e60146b9558f 100644 --- a/tests/utils/test_cli_util.py +++ b/tests/utils/test_cli_util.py @@ -33,7 +33,7 @@ from airflow.exceptions import AirflowException from airflow.models.log import Log from airflow.utils import cli, cli_action_loggers, timezone -from airflow.utils.cli import _search_for_dag_file, get_dag_by_pickle +from airflow.utils.cli import _search_for_dag_file # Mark entire module as db_test because ``action_cli`` wrapper still could use DB on callbacks: # - ``cli_action_loggers.on_pre_execution`` @@ -169,22 +169,6 @@ def test_setup_locations_none_pid_path(self): pid, _, _, _ = cli.setup_locations(process=process_name) assert pid == default_pid_path - def test_get_dag_by_pickle(self, session, dag_maker): - from airflow.models.dagpickle import DagPickle - - with dag_maker(dag_id="test_get_dag_by_pickle") as dag: - pass - - dp = DagPickle(dag=dag) - session.add(dp) - session.commit() - - dp_from_db = get_dag_by_pickle(pickle_id=dp.id, session=session) - assert dp_from_db.dag_id == "test_get_dag_by_pickle" - - with pytest.raises(AirflowException, match="pickle_id could not be found .* -42"): - get_dag_by_pickle(pickle_id=-42, session=session) - 
@pytest.mark.parametrize( ["given_command", "expected_masked_command"], [ diff --git a/tests/utils/test_db_cleanup.py b/tests/utils/test_db_cleanup.py index c05e0ceb5050..47e93c1616d6 100644 --- a/tests/utils/test_db_cleanup.py +++ b/tests/utils/test_db_cleanup.py @@ -338,7 +338,6 @@ def test_no_models_missing(self): "log_template", # not a significant source of data; age not indicative of staleness "dag_tag", # not a significant source of data; age not indicative of staleness, "dag_owner_attributes", # not a significant source of data; age not indicative of staleness, - "dag_pickle", # unsure of consequences "dag_code", # self-maintaining "dag_warning", # self-maintaining "connection", # leave alone diff --git a/tests/www/views/test_views_home.py b/tests/www/views/test_views_home.py index 44684cdb9ca7..59a2a288241b 100644 --- a/tests/www/views/test_views_home.py +++ b/tests/www/views/test_views_home.py @@ -205,7 +205,7 @@ def client_single_dag_edit(app, user_single_dag_edit): def _process_file(file_path): dag_file_processor = DagFileProcessor(dag_ids=[], dag_directory="/tmp", log=mock.MagicMock()) - dag_file_processor.process_file(file_path, [], False) + dag_file_processor.process_file(file_path, []) @pytest.fixture From d8f71a2d9fbc281ed1de281a00ddee6fbebaf0f7 Mon Sep 17 00:00:00 2001 From: yangyulely Date: Tue, 5 Nov 2024 17:38:05 +0800 Subject: [PATCH 031/137] Remove returns in final clause of S3ToDynamoDBOperator (#43456) * Remove returns in final clause of S3ToDynamoDBOperator * remove temp table in finally * remove duplicate log Co-authored-by: rom sharon <33751805+romsharon98@users.noreply.github.com> * remove except statement * remove test case --------- Co-authored-by: rom sharon <33751805+romsharon98@users.noreply.github.com> --- .../airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/providers/src/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py b/providers/src/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py index 5522f90628a8..57b18299a80d 100644 --- a/providers/src/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py +++ b/providers/src/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py @@ -240,7 +240,7 @@ def _load_into_existing_table(self) -> str: finally: self.log.info("Delete tmp DynamoDB table %s", self.tmp_table_name) client.delete_table(TableName=self.tmp_table_name) - return dynamodb_hook.get_conn().Table(self.dynamodb_table_name).table_arn + return dynamodb_hook.get_conn().Table(self.dynamodb_table_name).table_arn def execute(self, context: Context) -> str: """ From 73f2eab68081e966fd808bfaca923eed1f81bc43 Mon Sep 17 00:00:00 2001 From: Maciej Obuchowski Date: Tue, 5 Nov 2024 11:22:47 +0100 Subject: [PATCH 032/137] serialize asset/dataset timetable conditions in OpenLineage info also for Airflow 2 (#43434) Signed-off-by: Maciej Obuchowski --- .../common/compat/assets/__init__.py | 5 +- .../openlineage/extractors/manager.py | 2 + .../providers/openlineage/utils/utils.py | 24 +++- .../tests/openlineage/plugins/test_utils.py | 125 +++++++++++++++++- 4 files changed, 153 insertions(+), 3 deletions(-) diff --git a/providers/src/airflow/providers/common/compat/assets/__init__.py b/providers/src/airflow/providers/common/compat/assets/__init__.py index 460204a4e417..e302395f701e 100644 --- a/providers/src/airflow/providers/common/compat/assets/__init__.py +++ b/providers/src/airflow/providers/common/compat/assets/__init__.py @@ -47,11 +47,14 @@ _IS_AIRFLOW_2_10_OR_HIGHER = 
Version(Version(AIRFLOW_VERSION).base_version) >= Version("2.10.0") _IS_AIRFLOW_2_9_OR_HIGHER = Version(Version(AIRFLOW_VERSION).base_version) >= Version("2.9.0") + _IS_AIRFLOW_2_8_OR_HIGHER = Version(Version(AIRFLOW_VERSION).base_version) >= Version("2.8.0") # dataset is renamed to asset since Airflow 3.0 - from airflow.auth.managers.models.resource_details import DatasetDetails as AssetDetails from airflow.datasets import Dataset as Asset + if _IS_AIRFLOW_2_8_OR_HIGHER: + from airflow.auth.managers.models.resource_details import DatasetDetails as AssetDetails + if _IS_AIRFLOW_2_9_OR_HIGHER: from airflow.datasets import ( DatasetAll as AssetAll, diff --git a/providers/src/airflow/providers/openlineage/extractors/manager.py b/providers/src/airflow/providers/openlineage/extractors/manager.py index f6d572bae531..be824335718b 100644 --- a/providers/src/airflow/providers/openlineage/extractors/manager.py +++ b/providers/src/airflow/providers/openlineage/extractors/manager.py @@ -198,6 +198,8 @@ def get_hook_lineage(self) -> tuple[list[Dataset], list[Dataset]] | None: except ImportError: return None + if not hasattr(get_hook_lineage_collector(), "has_collected"): + return None if not get_hook_lineage_collector().has_collected: return None diff --git a/providers/src/airflow/providers/openlineage/utils/utils.py b/providers/src/airflow/providers/openlineage/utils/utils.py index a00552eed251..8c67c32f95b8 100644 --- a/providers/src/airflow/providers/openlineage/utils/utils.py +++ b/providers/src/airflow/providers/openlineage/utils/utils.py @@ -262,9 +262,31 @@ class DagInfo(InfoJsonEncodable): "start_date", "tags", ] - casts = {"timetable": lambda dag: dag.timetable.serialize() if getattr(dag, "timetable", None) else None} + casts = {"timetable": lambda dag: DagInfo.serialize_timetable(dag)} renames = {"_dag_id": "dag_id"} + @classmethod + def serialize_timetable(cls, dag: DAG) -> dict[str, Any]: + serialized = dag.timetable.serialize() + if serialized != {} and serialized is not None: + return serialized + if ( + hasattr(dag, "dataset_triggers") + and isinstance(dag.dataset_triggers, list) + and len(dag.dataset_triggers) + ): + triggers = dag.dataset_triggers + return { + "dataset_condition": { + "__type": "dataset_all", + "objects": [ + {"__type": "dataset", "uri": trigger.uri, "extra": trigger.extra} + for trigger in triggers + ], + } + } + return {} + class DagRunInfo(InfoJsonEncodable): """Defines encoding DagRun object to JSON.""" diff --git a/providers/tests/openlineage/plugins/test_utils.py b/providers/tests/openlineage/plugins/test_utils.py index 624bdecb5b45..531e21d42de1 100644 --- a/providers/tests/openlineage/plugins/test_utils.py +++ b/providers/tests/openlineage/plugins/test_utils.py @@ -29,8 +29,10 @@ from pkg_resources import parse_version from airflow.models import DAG as AIRFLOW_DAG, DagModel +from airflow.providers.common.compat.assets import Asset from airflow.providers.openlineage.plugins.facets import AirflowDebugRunFacet from airflow.providers.openlineage.utils.utils import ( + DagInfo, InfoJsonEncodable, OpenLineageRedactor, _get_all_packages_installed, @@ -40,11 +42,18 @@ get_fully_qualified_class_name, is_operator_disabled, ) +from airflow.serialization.enums import DagAttributeTypes from airflow.utils import timezone from airflow.utils.log.secrets_masker import _secrets_masker from airflow.utils.state import State -from tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS, BashOperator +from tests_common.test_utils.compat import ( + 
AIRFLOW_V_2_8_PLUS, + AIRFLOW_V_2_9_PLUS, + AIRFLOW_V_2_10_PLUS, + AIRFLOW_V_3_0_PLUS, + BashOperator, +) if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType @@ -322,3 +331,117 @@ def test_does_not_include_full_task_info(mock_include_full_task_info): MagicMock(), )["airflow"].task ) + + +@pytest.mark.db_test +@pytest.mark.skipif(not AIRFLOW_V_3_0_PLUS, reason="This test checks serialization only in 3.0 conditions") +def test_serialize_timetable(): + from airflow.providers.common.compat.assets import AssetAlias, AssetAll, AssetAny + from airflow.timetables.simple import AssetTriggeredTimetable + + asset = AssetAny( + Asset("2"), + AssetAlias("example-alias"), + Asset("3"), + AssetAll(AssetAlias("this-should-not-be-seen"), Asset("4")), + ) + dag = MagicMock() + dag.timetable = AssetTriggeredTimetable(asset) + dag_info = DagInfo(dag) + + assert dag_info.timetable == { + "asset_condition": { + "__type": DagAttributeTypes.ASSET_ANY, + "objects": [ + {"__type": DagAttributeTypes.ASSET, "extra": {}, "uri": "2"}, + {"__type": DagAttributeTypes.ASSET_ANY, "objects": []}, + {"__type": DagAttributeTypes.ASSET, "extra": {}, "uri": "3"}, + { + "__type": DagAttributeTypes.ASSET_ALL, + "objects": [ + {"__type": DagAttributeTypes.ASSET_ANY, "objects": []}, + {"__type": DagAttributeTypes.ASSET, "extra": {}, "uri": "4"}, + ], + }, + ], + } + } + + +@pytest.mark.db_test +@pytest.mark.skipif( + not AIRFLOW_V_2_10_PLUS or AIRFLOW_V_3_0_PLUS, + reason="This test checks serialization only in 2.10 conditions", +) +def test_serialize_timetable_2_10(): + from airflow.providers.common.compat.assets import AssetAlias, AssetAll, AssetAny + from airflow.timetables.simple import DatasetTriggeredTimetable + + asset = AssetAny( + Asset("2"), + AssetAlias("example-alias"), + Asset("3"), + AssetAll(AssetAlias("this-should-not-be-seen"), Asset("4")), + ) + + dag = MagicMock() + dag.timetable = DatasetTriggeredTimetable(asset) + dag_info = DagInfo(dag) + + assert dag_info.timetable == { + "dataset_condition": { + "__type": DagAttributeTypes.DATASET_ANY, + "objects": [ + {"__type": DagAttributeTypes.DATASET, "extra": None, "uri": "2"}, + {"__type": DagAttributeTypes.DATASET_ANY, "objects": []}, + {"__type": DagAttributeTypes.DATASET, "extra": None, "uri": "3"}, + { + "__type": DagAttributeTypes.DATASET_ALL, + "objects": [ + {"__type": DagAttributeTypes.DATASET_ANY, "objects": []}, + {"__type": DagAttributeTypes.DATASET, "extra": None, "uri": "4"}, + ], + }, + ], + } + } + + +@pytest.mark.skipif( + not AIRFLOW_V_2_9_PLUS or AIRFLOW_V_2_10_PLUS, + reason="This test checks serialization only in 2.9 conditions", +) +def test_serialize_timetable_2_9(): + dag = MagicMock() + dag.timetable.serialize.return_value = {} + dag.dataset_triggers = [Asset("a"), Asset("b")] + dag_info = DagInfo(dag) + assert dag_info.timetable == { + "dataset_condition": { + "__type": "dataset_all", + "objects": [ + {"__type": "dataset", "extra": None, "uri": "a"}, + {"__type": "dataset", "extra": None, "uri": "b"}, + ], + } + } + + +@pytest.mark.skipif( + not AIRFLOW_V_2_8_PLUS or AIRFLOW_V_2_9_PLUS, + reason="This test checks serialization only in 2.8 conditions", +) +def test_serialize_timetable_2_8(): + dag = MagicMock() + dag.timetable.serialize.return_value = {} + dag.dataset_triggers = [Asset("a"), Asset("b")] + dag_info = DagInfo(dag) + assert dag_info.timetable == { + "dataset_condition": { + "__type": "dataset_all", + "objects": [ + {"__type": "dataset", "extra": None, "uri": "a"}, + {"__type": "dataset", "extra": None, 
"uri": "b"}, + ], + } + } From 6548d50841fead18d1b66fd10a76a8b94b3e50b0 Mon Sep 17 00:00:00 2001 From: LIU ZHE YOU <68415893+jason810496@users.noreply.github.com> Date: Tue, 5 Nov 2024 18:51:22 +0800 Subject: [PATCH 033/137] AIP-84 Get Event Logs (#43407) * AIP-84 Get Event Logs * fix: add http execption docs for router * refactor: remove `FilterParam` out of this PR --- .../endpoints/event_log_endpoint.py | 1 + .../core_api/openapi/v1-generated.yaml | 164 ++++++++++++++++++ .../core_api/routes/public/event_logs.py | 97 ++++++++++- .../core_api/serializers/event_logs.py | 7 + airflow/ui/openapi-gen/queries/common.ts | 62 +++++++ airflow/ui/openapi-gen/queries/prefetch.ts | 90 ++++++++++ airflow/ui/openapi-gen/queries/queries.ts | 99 +++++++++++ airflow/ui/openapi-gen/queries/suspense.ts | 99 +++++++++++ .../ui/openapi-gen/requests/schemas.gen.ts | 20 +++ .../ui/openapi-gen/requests/services.gen.ts | 53 ++++++ airflow/ui/openapi-gen/requests/types.gen.ts | 50 ++++++ .../core_api/routes/public/test_event_logs.py | 130 ++++++++++++++ 12 files changed, 871 insertions(+), 1 deletion(-) diff --git a/airflow/api_connexion/endpoints/event_log_endpoint.py b/airflow/api_connexion/endpoints/event_log_endpoint.py index 8084c2ecab67..93b951a35888 100644 --- a/airflow/api_connexion/endpoints/event_log_endpoint.py +++ b/airflow/api_connexion/endpoints/event_log_endpoint.py @@ -52,6 +52,7 @@ def get_event_log(*, event_log_id: int, session: Session = NEW_SESSION) -> APIRe return event_log_schema.dump(event_log) +@mark_fastapi_migration_done @security.requires_access_dag("GET", DagAccessEntity.AUDIT_LOG) @format_parameters({"limit": check_limit}) @provide_session diff --git a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml index 3dcff4b2d066..e844cbceeb47 100644 --- a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml @@ -1390,6 +1390,154 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /public/eventLogs/: + get: + tags: + - Event Log + summary: Get Event Logs + description: Get all Event Logs. 
+ operationId: get_event_logs + parameters: + - name: dag_id + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Dag Id + - name: task_id + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Task Id + - name: run_id + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Run Id + - name: map_index + in: query + required: false + schema: + anyOf: + - type: integer + - type: 'null' + title: Map Index + - name: try_number + in: query + required: false + schema: + anyOf: + - type: integer + - type: 'null' + title: Try Number + - name: owner + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Owner + - name: event + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Event + - name: excluded_events + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Excluded Events + - name: included_events + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Included Events + - name: before + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Before + - name: after + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: After + - name: limit + in: query + required: false + schema: + type: integer + default: 100 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: string + default: id + title: Order By + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/EventLogCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' /public/monitor/health: get: tags: @@ -3149,6 +3297,22 @@ components: This is the set of allowable values for the ``warning_type`` field in the DagWarning model.' + EventLogCollectionResponse: + properties: + event_logs: + items: + $ref: '#/components/schemas/EventLogResponse' + type: array + title: Event Logs + total_entries: + type: integer + title: Total Entries + type: object + required: + - event_logs + - total_entries + title: EventLogCollectionResponse + description: Event Log Collection Response. EventLogResponse: properties: event_log_id: diff --git a/airflow/api_fastapi/core_api/routes/public/event_logs.py b/airflow/api_fastapi/core_api/routes/public/event_logs.py index 537bb5ffe4df..462c26696957 100644 --- a/airflow/api_fastapi/core_api/routes/public/event_logs.py +++ b/airflow/api_fastapi/core_api/routes/public/event_logs.py @@ -16,17 +16,26 @@ # under the License. 
from __future__ import annotations -from fastapi import Depends, HTTPException, status +from datetime import datetime + +from fastapi import Depends, HTTPException, Query, status from sqlalchemy import select from sqlalchemy.orm import Session from typing_extensions import Annotated from airflow.api_fastapi.common.db.common import ( get_session, + paginated_select, +) +from airflow.api_fastapi.common.parameters import ( + QueryLimit, + QueryOffset, + SortParam, ) from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc from airflow.api_fastapi.core_api.serializers.event_logs import ( + EventLogCollectionResponse, EventLogResponse, ) from airflow.models import Log @@ -51,3 +60,89 @@ async def get_event_log( event_log, from_attributes=True, ) + + +@event_logs_router.get( + "/", + responses=create_openapi_http_exception_doc([status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]), +) +async def get_event_logs( + limit: QueryLimit, + offset: QueryOffset, + session: Annotated[Session, Depends(get_session)], + order_by: Annotated[ + SortParam, + Depends( + SortParam( + [ + "id", # event_log_id + "dttm", # when + "dag_id", + "task_id", + "run_id", + "event", + "execution_date", # logical_date + "owner", + "extra", + ], + Log, + ).dynamic_depends() + ), + ], + dag_id: str | None = None, + task_id: str | None = None, + run_id: str | None = None, + map_index: int | None = None, + try_number: int | None = None, + owner: str | None = None, + event: str | None = None, + excluded_events: list[str] | None = Query(None), + included_events: list[str] | None = Query(None), + before: datetime | None = None, + after: datetime | None = None, +) -> EventLogCollectionResponse: + """Get all Event Logs.""" + base_select = select(Log).group_by(Log.id) + # TODO: Refactor using the `FilterParam` class in commit `574b72e41cc5ed175a2bbf4356522589b836bb11` + if dag_id is not None: + base_select = base_select.where(Log.dag_id == dag_id) + if task_id is not None: + base_select = base_select.where(Log.task_id == task_id) + if run_id is not None: + base_select = base_select.where(Log.run_id == run_id) + if map_index is not None: + base_select = base_select.where(Log.map_index == map_index) + if try_number is not None: + base_select = base_select.where(Log.try_number == try_number) + if owner is not None: + base_select = base_select.where(Log.owner == owner) + if event is not None: + base_select = base_select.where(Log.event == event) + if excluded_events is not None: + base_select = base_select.where(Log.event.notin_(excluded_events)) + if included_events is not None: + base_select = base_select.where(Log.event.in_(included_events)) + if before is not None: + base_select = base_select.where(Log.dttm < before) + if after is not None: + base_select = base_select.where(Log.dttm > after) + event_logs_select, total_entries = paginated_select( + base_select, + [], + order_by, + offset, + limit, + session, + ) + event_logs = session.scalars(event_logs_select).all() + + return EventLogCollectionResponse( + event_logs=[ + EventLogResponse.model_validate( + event_log, + from_attributes=True, + ) + for event_log in event_logs + ], + total_entries=total_entries, + ) diff --git a/airflow/api_fastapi/core_api/serializers/event_logs.py b/airflow/api_fastapi/core_api/serializers/event_logs.py index e295dc35061f..f70e5bd15834 100644 --- a/airflow/api_fastapi/core_api/serializers/event_logs.py +++ 
b/airflow/api_fastapi/core_api/serializers/event_logs.py @@ -38,3 +38,10 @@ class EventLogResponse(BaseModel): extra: str | None model_config = ConfigDict(populate_by_name=True) + + +class EventLogCollectionResponse(BaseModel): + """Event Log Collection Response.""" + + event_logs: list[EventLogResponse] + total_entries: int diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts index 75a343ce74c7..1248a77ce188 100644 --- a/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow/ui/openapi-gen/queries/common.ts @@ -359,6 +359,68 @@ export const UseEventLogServiceGetEventLogKeyFn = ( }, queryKey?: Array, ) => [useEventLogServiceGetEventLogKey, ...(queryKey ?? [{ eventLogId }])]; +export type EventLogServiceGetEventLogsDefaultResponse = Awaited< + ReturnType +>; +export type EventLogServiceGetEventLogsQueryResult< + TData = EventLogServiceGetEventLogsDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useEventLogServiceGetEventLogsKey = "EventLogServiceGetEventLogs"; +export const UseEventLogServiceGetEventLogsKeyFn = ( + { + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }: { + after?: string; + before?: string; + dagId?: string; + event?: string; + excludedEvents?: string[]; + includedEvents?: string[]; + limit?: number; + mapIndex?: number; + offset?: number; + orderBy?: string; + owner?: string; + runId?: string; + taskId?: string; + tryNumber?: number; + } = {}, + queryKey?: Array, +) => [ + useEventLogServiceGetEventLogsKey, + ...(queryKey ?? [ + { + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }, + ]), +]; export type MonitorServiceGetHealthDefaultResponse = Awaited< ReturnType >; diff --git a/airflow/ui/openapi-gen/queries/prefetch.ts b/airflow/ui/openapi-gen/queries/prefetch.ts index 63e8d4b43132..bf6ad800be01 100644 --- a/airflow/ui/openapi-gen/queries/prefetch.ts +++ b/airflow/ui/openapi-gen/queries/prefetch.ts @@ -453,6 +453,96 @@ export const prefetchUseEventLogServiceGetEventLog = ( queryKey: Common.UseEventLogServiceGetEventLogKeyFn({ eventLogId }), queryFn: () => EventLogService.getEventLog({ eventLogId }), }); +/** + * Get Event Logs + * Get all Event Logs. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.taskId + * @param data.runId + * @param data.mapIndex + * @param data.tryNumber + * @param data.owner + * @param data.event + * @param data.excludedEvents + * @param data.includedEvents + * @param data.before + * @param data.after + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns EventLogCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseEventLogServiceGetEventLogs = ( + queryClient: QueryClient, + { + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }: { + after?: string; + before?: string; + dagId?: string; + event?: string; + excludedEvents?: string[]; + includedEvents?: string[]; + limit?: number; + mapIndex?: number; + offset?: number; + orderBy?: string; + owner?: string; + runId?: string; + taskId?: string; + tryNumber?: number; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseEventLogServiceGetEventLogsKeyFn({ + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }), + queryFn: () => + EventLogService.getEventLogs({ + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }), + }); /** * Get Health * @returns HealthInfoSchema Successful Response diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts index 5f8f649372db..70796be40171 100644 --- a/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow/ui/openapi-gen/queries/queries.ts @@ -572,6 +572,105 @@ export const useEventLogServiceGetEventLog = < queryFn: () => EventLogService.getEventLog({ eventLogId }) as TData, ...options, }); +/** + * Get Event Logs + * Get all Event Logs. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.taskId + * @param data.runId + * @param data.mapIndex + * @param data.tryNumber + * @param data.owner + * @param data.event + * @param data.excludedEvents + * @param data.includedEvents + * @param data.before + * @param data.after + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns EventLogCollectionResponse Successful Response + * @throws ApiError + */ +export const useEventLogServiceGetEventLogs = < + TData = Common.EventLogServiceGetEventLogsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }: { + after?: string; + before?: string; + dagId?: string; + event?: string; + excludedEvents?: string[]; + includedEvents?: string[]; + limit?: number; + mapIndex?: number; + offset?: number; + orderBy?: string; + owner?: string; + runId?: string; + taskId?: string; + tryNumber?: number; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseEventLogServiceGetEventLogsKeyFn( + { + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }, + queryKey, + ), + queryFn: () => + EventLogService.getEventLogs({ + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }) as TData, + ...options, + }); /** * Get Health * @returns HealthInfoSchema Successful Response diff --git a/airflow/ui/openapi-gen/queries/suspense.ts b/airflow/ui/openapi-gen/queries/suspense.ts index 1222b7f5536c..4f75c2ba0ce4 100644 --- a/airflow/ui/openapi-gen/queries/suspense.ts +++ b/airflow/ui/openapi-gen/queries/suspense.ts @@ -558,6 +558,105 @@ export const useEventLogServiceGetEventLogSuspense = < queryFn: () => EventLogService.getEventLog({ eventLogId }) as TData, ...options, }); +/** + * Get Event Logs + * Get all Event Logs. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.taskId + * @param data.runId + * @param data.mapIndex + * @param data.tryNumber + * @param data.owner + * @param data.event + * @param data.excludedEvents + * @param data.includedEvents + * @param data.before + * @param data.after + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns EventLogCollectionResponse Successful Response + * @throws ApiError + */ +export const useEventLogServiceGetEventLogsSuspense = < + TData = Common.EventLogServiceGetEventLogsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }: { + after?: string; + before?: string; + dagId?: string; + event?: string; + excludedEvents?: string[]; + includedEvents?: string[]; + limit?: number; + mapIndex?: number; + offset?: number; + orderBy?: string; + owner?: string; + runId?: string; + taskId?: string; + tryNumber?: number; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseEventLogServiceGetEventLogsKeyFn( + { + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }, + queryKey, + ), + queryFn: () => + EventLogService.getEventLogs({ + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }) as TData, + ...options, + }); /** * Get Health * @returns HealthInfoSchema Successful Response diff --git a/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow/ui/openapi-gen/requests/schemas.gen.ts index 906bb43df988..53272eae2e77 100644 --- a/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -1502,6 +1502,26 @@ This is the set of allowable values for the \`\`warning_type\`\` field in the DagWarning model.`, } as const; +export const $EventLogCollectionResponse = { + properties: { + event_logs: { + items: { + $ref: "#/components/schemas/EventLogResponse", + }, + type: "array", + title: "Event Logs", + }, + total_entries: { + type: "integer", + title: "Total Entries", + }, + }, + type: "object", + required: ["event_logs", "total_entries"], + title: "EventLogCollectionResponse", + description: "Event Log Collection Response.", +} as const; + export const $EventLogResponse = { properties: { event_log_id: { diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts index e82d0c5146fa..fa5c7739c909 100644 --- a/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow/ui/openapi-gen/requests/services.gen.ts @@ -51,6 +51,8 @@ import type { GetDagSourceResponse, GetEventLogData, GetEventLogResponse, + GetEventLogsData, + GetEventLogsResponse, GetHealthResponse, ListDagWarningsData, ListDagWarningsResponse, @@ -810,6 +812,57 @@ export class EventLogService { }, }); } + + /** + * Get Event Logs + * Get all Event Logs. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.taskId + * @param data.runId + * @param data.mapIndex + * @param data.tryNumber + * @param data.owner + * @param data.event + * @param data.excludedEvents + * @param data.includedEvents + * @param data.before + * @param data.after + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns EventLogCollectionResponse Successful Response + * @throws ApiError + */ + public static getEventLogs( + data: GetEventLogsData = {}, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/eventLogs/", + query: { + dag_id: data.dagId, + task_id: data.taskId, + run_id: data.runId, + map_index: data.mapIndex, + try_number: data.tryNumber, + owner: data.owner, + event: data.event, + excluded_events: data.excludedEvents, + included_events: data.includedEvents, + before: data.before, + after: data.after, + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 422: "Validation Error", + }, + }); + } } export class MonitorService { diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index afe771e5a1f6..909b78dd627e 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -347,6 +347,14 @@ export type DagTagPydantic = { */ export type DagWarningType = "asset conflict" | "non-existent pool"; +/** + * Event Log Collection Response. + */ +export type EventLogCollectionResponse = { + event_logs: Array; + total_entries: number; +}; + /** * Event Log Response. */ @@ -851,6 +859,25 @@ export type GetEventLogData = { export type GetEventLogResponse = EventLogResponse; +export type GetEventLogsData = { + after?: string | null; + before?: string | null; + dagId?: string | null; + event?: string | null; + excludedEvents?: Array | null; + includedEvents?: Array | null; + limit?: number; + mapIndex?: number | null; + offset?: number; + orderBy?: string; + owner?: string | null; + runId?: string | null; + taskId?: string | null; + tryNumber?: number | null; +}; + +export type GetEventLogsResponse = EventLogCollectionResponse; + export type GetHealthResponse = HealthInfoSchema; export type ListDagWarningsData = { @@ -1601,6 +1628,29 @@ export type $OpenApiTs = { }; }; }; + "/public/eventLogs/": { + get: { + req: GetEventLogsData; + res: { + /** + * Successful Response + */ + 200: EventLogCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; "/public/monitor/health": { get: { res: { diff --git a/tests/api_fastapi/core_api/routes/public/test_event_logs.py b/tests/api_fastapi/core_api/routes/public/test_event_logs.py index c329015b9f9e..347f587bf5c2 100644 --- a/tests/api_fastapi/core_api/routes/public/test_event_logs.py +++ b/tests/api_fastapi/core_api/routes/public/test_event_logs.py @@ -175,3 +175,133 @@ def test_get_event_log(self, test_client, setup, event_log_key, expected_status_ } assert response.json() == expected_json + + +class TestGetEventLogs(TestEventLogsEndpoint): + @pytest.mark.parametrize( + "query_params, expected_status_code, expected_total_entries, expected_events", + [ + ( + {}, + 200, + 4, + [EVENT_NORMAL, EVENT_WITH_OWNER, TASK_INSTANCE_EVENT, EVENT_WITH_OWNER_AND_TASK_INSTANCE], + ), + # offset, limit + ( + {"offset": 1, "limit": 2}, + 200, + 4, + [EVENT_WITH_OWNER, 
TASK_INSTANCE_EVENT], + ), + # equal filter + ( + {"event": EVENT_NORMAL}, + 200, + 1, + [EVENT_NORMAL], + ), + ( + {"event": EVENT_WITH_OWNER}, + 200, + 1, + [EVENT_WITH_OWNER], + ), + ( + {"task_id": TASK_ID}, + 200, + 2, + [TASK_INSTANCE_EVENT, EVENT_WITH_OWNER_AND_TASK_INSTANCE], + ), + # multiple equal filters + ( + {"event": EVENT_WITH_OWNER, "owner": OWNER}, + 200, + 1, + [EVENT_WITH_OWNER], + ), + ( + {"event": EVENT_WITH_OWNER_AND_TASK_INSTANCE, "task_id": TASK_ID, "run_id": DAG_RUN_ID}, + 200, + 1, + [EVENT_WITH_OWNER_AND_TASK_INSTANCE], + ), + # list filter + ( + {"excluded_events": [EVENT_NORMAL, EVENT_WITH_OWNER]}, + 200, + 2, + [TASK_INSTANCE_EVENT, EVENT_WITH_OWNER_AND_TASK_INSTANCE], + ), + ( + {"included_events": [EVENT_NORMAL, EVENT_WITH_OWNER]}, + 200, + 2, + [EVENT_NORMAL, EVENT_WITH_OWNER], + ), + # multiple list filters + ( + {"excluded_events": [EVENT_NORMAL], "included_events": [EVENT_WITH_OWNER]}, + 200, + 1, + [EVENT_WITH_OWNER], + ), + # before, after filters + ( + {"before": "2024-06-15T00:00:00Z"}, + 200, + 0, + [], + ), + ( + {"after": "2024-06-15T00:00:00Z"}, + 200, + 4, + [EVENT_NORMAL, EVENT_WITH_OWNER, TASK_INSTANCE_EVENT, EVENT_WITH_OWNER_AND_TASK_INSTANCE], + ), + # order_by + ( + {"order_by": "-id"}, + 200, + 4, + [EVENT_WITH_OWNER_AND_TASK_INSTANCE, TASK_INSTANCE_EVENT, EVENT_WITH_OWNER, EVENT_NORMAL], + ), + ( + {"order_by": "execution_date"}, + 200, + 4, + [TASK_INSTANCE_EVENT, EVENT_WITH_OWNER_AND_TASK_INSTANCE, EVENT_NORMAL, EVENT_WITH_OWNER], + ), + # combination of query parameters + ( + {"offset": 1, "excluded_events": ["non_existed_event"], "order_by": "event"}, + 200, + 4, + [EVENT_WITH_OWNER_AND_TASK_INSTANCE, EVENT_NORMAL, TASK_INSTANCE_EVENT], + ), + ( + {"excluded_events": [EVENT_NORMAL], "included_events": [EVENT_WITH_OWNER], "order_by": "-id"}, + 200, + 1, + [EVENT_WITH_OWNER], + ), + ( + {"map_index": -1, "try_number": 0, "order_by": "event", "limit": 1}, + 200, + 2, + [EVENT_WITH_OWNER_AND_TASK_INSTANCE], + ), + ], + ) + def test_get_event_logs( + self, test_client, query_params, expected_status_code, expected_total_entries, expected_events + ): + response = test_client.get("/public/eventLogs/", params=query_params) + assert response.status_code == expected_status_code + if expected_status_code != 200: + return + + resp_json = response.json() + assert resp_json["total_entries"] == expected_total_entries + for event_log, expected_event in zip(resp_json["event_logs"], expected_events): + assert event_log["event"] == expected_event From 18ea01cef2b92fe820ceaa33be7b44f9f576aad4 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Tue, 5 Nov 2024 12:01:30 +0100 Subject: [PATCH 034/137] Fix reproducibility of prepared provider packages (fix flit frontend) (#43683) After some checks it turned out that reproducibility of produced packages depends not only on the build backend configured for the project but also on the build front-end used - because frontend is the one to modify meta-data in prepared packages - including the build tool used, it's version and metadata version supported by the front-end. That's why in order to maintain reproducibility for anyone who builds the packages, we have to pin not only the build backend in pyproject.toml (flit-core) but also build fronted used (flit). Since package preparation is done with breeze, we can do it by pinning flit (and just in case also flit-core) so that anyone who builds specific version of the package will use exactly the same flit as the person who built the original packages. 
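A minimal sketch of how such reproducibility can be checked locally, assuming two independently
built artifacts of the same provider package are available side by side (the directory and file
names below are illustrative only, not part of this change):

    # Compare checksums of two separate builds; identical digests mean the build is reproducible.
    import hashlib
    from pathlib import Path

    def sha256(path: Path) -> str:
        return hashlib.sha256(path.read_bytes()).hexdigest()

    first = Path("dist-build-1/apache_airflow_providers_fab-1.5.0.tar.gz")
    second = Path("dist-build-2/apache_airflow_providers_fab-1.5.0.tar.gz")
    assert sha256(first) == sha256(second), "builds are not reproducible"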
This way we will avoid reproducibility problems experienced with 1.5.0 release of FAB. --- dev/README_RELEASE_PROVIDER_PACKAGES.md | 1 - dev/breeze/README.md | 2 +- dev/breeze/pyproject.toml | 15 ++++++++++++++- .../commands/release_candidate_command.py | 3 --- .../commands/release_management_commands.py | 8 +------- .../templates/pyproject_TEMPLATE.toml.jinja2 | 3 +-- .../src/airflow_breeze/utils/python_versions.py | 14 -------------- .../src/airflow_breeze/utils/reproducible.py | 3 --- dev/breeze/uv.lock | 4 +++- 9 files changed, 20 insertions(+), 33 deletions(-) diff --git a/dev/README_RELEASE_PROVIDER_PACKAGES.md b/dev/README_RELEASE_PROVIDER_PACKAGES.md index a96d07d9441b..49631dd85876 100644 --- a/dev/README_RELEASE_PROVIDER_PACKAGES.md +++ b/dev/README_RELEASE_PROVIDER_PACKAGES.md @@ -349,7 +349,6 @@ export AIRFLOW_REPO_ROOT=$(pwd -P) rm -rf ${AIRFLOW_REPO_ROOT}/dist/* ``` - * Release candidate packages: ```shell script diff --git a/dev/breeze/README.md b/dev/breeze/README.md index 9a3f06066eb2..70ad4076750a 100644 --- a/dev/breeze/README.md +++ b/dev/breeze/README.md @@ -136,6 +136,6 @@ PLEASE DO NOT MODIFY THE HASH BELOW! IT IS AUTOMATICALLY UPDATED BY PRE-COMMIT. --------------------------------------------------------------------------------------------------------- -Package config hash: f13c42703e0a262d9f3c1bee608ff32c368be4c6a11f150a2f95809938641f5ec07904d5cc2e3944dfe4d206dc52846f8b81193fc279a333ff898dd033e07be4 +Package config hash: 5d32e2c150de1cc22d522f94d86500c739f434439ae064e35791ac795932a1f21509c3d0fcf9f2351e7901a32601190bd4cb640799620a44d0ff6d9244aef57c --------------------------------------------------------------------------------------------------------- diff --git a/dev/breeze/pyproject.toml b/dev/breeze/pyproject.toml index a52b2f977585..f5b4b9b98c4e 100644 --- a/dev/breeze/pyproject.toml +++ b/dev/breeze/pyproject.toml @@ -47,7 +47,20 @@ dependencies = [ "black>=23.11.0", "click>=8.1.7", "filelock>=3.13.0", - "flit>=3.5.0", + # + # We pin flit in order to make sure reproducibility of provider packages is maintained + # It turns out that when packages are prepared metadata version in the produced packages + # is taken from the front-end not from the backend, so in order to make sure that the + # packages are reproducible, we should pin both backend in "build-system" and frontend in + # "dependencies" of the environment that is used to build the packages. 
+ # + # TODO(potiuk): automate bumping the version of flit in breeze and sync it with + # the version in the template for provider packages with pre-commit also add instructions in + # the source packages explaining that reproducibility can only be achieved by using the same + # version of flit front-end to build the package + # + "flit==3.10.1", + "flit-core==3.10.1", "google-api-python-client>=2.142.0", "google-auth-httplib2>=0.2.0", "google-auth-oauthlib>=1.2.0", diff --git a/dev/breeze/src/airflow_breeze/commands/release_candidate_command.py b/dev/breeze/src/airflow_breeze/commands/release_candidate_command.py index 7fcba61f1066..d5e2bd6f9d3b 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_candidate_command.py +++ b/dev/breeze/src/airflow_breeze/commands/release_candidate_command.py @@ -27,7 +27,6 @@ from airflow_breeze.utils.confirm import confirm_action from airflow_breeze.utils.console import console_print from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT, DIST_DIR, OUT_DIR -from airflow_breeze.utils.python_versions import check_python_version from airflow_breeze.utils.reproducible import get_source_date_epoch, repack_deterministically from airflow_breeze.utils.run_utils import run_command @@ -341,7 +340,6 @@ def remove_old_releases(version, repo_root): "--version", required=True, help="The release candidate version e.g. 2.4.3rc1", envvar="VERSION" ) def prepare_airflow_tarball(version: str): - check_python_version() from packaging.version import Version airflow_version = Version(version) @@ -367,7 +365,6 @@ def prepare_airflow_tarball(version: str): ) @option_answer def publish_release_candidate(version, previous_version, github_token): - check_python_version() from packaging.version import Version airflow_version = Version(version) diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py index 61a26e9993f8..6955d38e0eb7 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py @@ -151,7 +151,7 @@ generate_providers_metadata_for_package, get_related_providers, ) -from airflow_breeze.utils.python_versions import check_python_version, get_python_version_list +from airflow_breeze.utils.python_versions import get_python_version_list from airflow_breeze.utils.reproducible import get_source_date_epoch, repack_deterministically from airflow_breeze.utils.run_utils import ( run_command, @@ -530,7 +530,6 @@ def prepare_airflow_packages( version_suffix_for_pypi: str, use_local_hatch: bool, ): - check_python_version() perform_environment_checks() fix_ownership_using_docker() cleanup_python_generated_files() @@ -576,7 +575,6 @@ def prepare_airflow_task_sdk_packages( package_format: str, use_local_hatch: bool, ): - check_python_version() perform_environment_checks() fix_ownership_using_docker() cleanup_python_generated_files() @@ -907,7 +905,6 @@ def prepare_provider_packages( skip_tag_check: bool, version_suffix_for_pypi: str, ): - check_python_version(release_provider_packages=True) perform_environment_checks() fix_ownership_using_docker() cleanup_python_generated_files() @@ -3182,7 +3179,6 @@ def prepare_helm_chart_tarball( ) -> None: import yaml - check_python_version() chart_yaml_file_content = CHART_YAML_FILE.read_text() chart_yaml_dict = yaml.safe_load(chart_yaml_file_content) version_in_chart = chart_yaml_dict["version"] @@ -3324,8 +3320,6 @@ def 
prepare_helm_chart_tarball( @option_dry_run @option_verbose def prepare_helm_chart_package(sign_email: str): - check_python_version() - import yaml from airflow_breeze.utils.kubernetes_utils import ( diff --git a/dev/breeze/src/airflow_breeze/templates/pyproject_TEMPLATE.toml.jinja2 b/dev/breeze/src/airflow_breeze/templates/pyproject_TEMPLATE.toml.jinja2 index 363b02e6136c..9849bfe8bc52 100644 --- a/dev/breeze/src/airflow_breeze/templates/pyproject_TEMPLATE.toml.jinja2 +++ b/dev/breeze/src/airflow_breeze/templates/pyproject_TEMPLATE.toml.jinja2 @@ -39,9 +39,8 @@ # IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE # `pyproject_TEMPLATE.toml.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -# [build-system] -requires = ["flit_core==3.10.0"] +requires = ["flit_core==3.10.1"] build-backend = "flit_core.buildapi" [project] diff --git a/dev/breeze/src/airflow_breeze/utils/python_versions.py b/dev/breeze/src/airflow_breeze/utils/python_versions.py index 4f5a7a00bb58..3ac3f8be30ff 100644 --- a/dev/breeze/src/airflow_breeze/utils/python_versions.py +++ b/dev/breeze/src/airflow_breeze/utils/python_versions.py @@ -43,17 +43,3 @@ def get_python_version_list(python_versions: str) -> list[str]: ) sys.exit(1) return python_version_list - - -def check_python_version(release_provider_packages: bool = False): - if not sys.version_info < (3, 12) and release_provider_packages: - get_console().print("[error]Python 3.12 is not supported.\n") - get_console().print( - "[warning]Please reinstall Breeze using Python 3.9 - 3.11 environment because not all " - "provider packages support Python 3.12 yet.[/]\n\n" - "If you are using uv:\n\n" - " uv tool install --force --reinstall --python 3.9 -e ./dev/breeze\n\n" - "If you are using pipx:\n\n" - " pipx install --python $(which python3.9) --force -e ./dev/breeze\n" - ) - sys.exit(1) diff --git a/dev/breeze/src/airflow_breeze/utils/reproducible.py b/dev/breeze/src/airflow_breeze/utils/reproducible.py index 1429333d6415..cf4005d9ddd1 100644 --- a/dev/breeze/src/airflow_breeze/utils/reproducible.py +++ b/dev/breeze/src/airflow_breeze/utils/reproducible.py @@ -43,7 +43,6 @@ from subprocess import CalledProcessError, CompletedProcess from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT, OUT_DIR, REPRODUCIBLE_DIR -from airflow_breeze.utils.python_versions import check_python_version from airflow_breeze.utils.run_utils import run_command @@ -91,7 +90,6 @@ def reset(tarinfo): tarinfo.mtime = timestamp return tarinfo - check_python_version() OUT_DIR.mkdir(exist_ok=True) shutil.rmtree(REPRODUCIBLE_DIR, ignore_errors=True) REPRODUCIBLE_DIR.mkdir(exist_ok=True) @@ -149,7 +147,6 @@ def reset(tarinfo): def main(): - check_python_version() parser = ArgumentParser() parser.add_argument("-a", "--archive", help="archive to repack") parser.add_argument("-o", "--out", help="archive destination") diff --git a/dev/breeze/uv.lock b/dev/breeze/uv.lock index bcfc8c4ce785..0aed5ae9cd12 100644 --- a/dev/breeze/uv.lock +++ b/dev/breeze/uv.lock @@ -29,6 +29,7 @@ dependencies = [ { name = "click" }, { name = "filelock" }, { name = "flit" }, + { name = "flit-core" }, { name = "gitpython" }, { name = "google-api-python-client" }, { name = "google-auth-httplib2" }, @@ -60,7 +61,8 @@ requires-dist = [ { name = "black", specifier = ">=23.11.0" }, { name = "click", specifier = ">=8.1.7" }, { name = "filelock", specifier = ">=3.13.0" }, - { name = "flit", specifier = ">=3.5.0" }, + { name = "flit", specifier = "==3.10.1" }, + { name = "flit-core", specifier = 
"==3.10.1" }, { name = "gitpython", specifier = ">=3.1.40" }, { name = "google-api-python-client", specifier = ">=2.142.0" }, { name = "google-auth-httplib2", specifier = ">=0.2.0" }, From e9bc557947f2227fa7cb570c35726f9c6680260f Mon Sep 17 00:00:00 2001 From: Elad Kalif <45845474+eladkal@users.noreply.github.com> Date: Tue, 5 Nov 2024 13:53:39 +0200 Subject: [PATCH 035/137] Update providers metadata 2024-11-05 (#43677) --- generated/provider_metadata.json | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/generated/provider_metadata.json b/generated/provider_metadata.json index 238a2333961f..a64b8770cf79 100644 --- a/generated/provider_metadata.json +++ b/generated/provider_metadata.json @@ -3917,6 +3917,10 @@ "1.4.1": { "associated_airflow_version": "2.10.1", "date_released": "2024-10-14T07:10:52Z" + }, + "1.5.0": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-11-05T07:57:33Z" } }, "facebook": { From 9ede38adad6f57c644771846c1920a53346e584f Mon Sep 17 00:00:00 2001 From: rom sharon <33751805+romsharon98@users.noreply.github.com> Date: Tue, 5 Nov 2024 14:22:29 +0200 Subject: [PATCH 036/137] Change default value of `namespace` in `task.kubernetes` to be None (#43402) * change default namespace value to be None * passing namespace to decorator when in_cluster=False * add breaking change * change changelog * revert ui change --- .../src/airflow/providers/cncf/kubernetes/CHANGELOG.rst | 8 ++++++++ .../providers/cncf/kubernetes/decorators/kubernetes.py | 2 +- .../src/airflow/providers/cncf/kubernetes/provider.yaml | 1 + .../tests/cncf/kubernetes/decorators/test_kubernetes.py | 2 ++ 4 files changed, 12 insertions(+), 1 deletion(-) diff --git a/providers/src/airflow/providers/cncf/kubernetes/CHANGELOG.rst b/providers/src/airflow/providers/cncf/kubernetes/CHANGELOG.rst index c54da827b56e..28417d6ac3da 100644 --- a/providers/src/airflow/providers/cncf/kubernetes/CHANGELOG.rst +++ b/providers/src/airflow/providers/cncf/kubernetes/CHANGELOG.rst @@ -27,6 +27,14 @@ Changelog --------- + +main +..... + +.. warning:: + Set the default value of ``namespace`` in ``@task.kubernetes`` to ``None``, so it uses the cluster namespace when ``in_cluster`` is True. Be sure to specify a namespace when using this decorator. To retain the previous behavior, set ``namespace="default"`` + + 9.0.1 ..... diff --git a/providers/src/airflow/providers/cncf/kubernetes/decorators/kubernetes.py b/providers/src/airflow/providers/cncf/kubernetes/decorators/kubernetes.py index 2f00de10b0c2..c3f5951540cb 100644 --- a/providers/src/airflow/providers/cncf/kubernetes/decorators/kubernetes.py +++ b/providers/src/airflow/providers/cncf/kubernetes/decorators/kubernetes.py @@ -65,7 +65,7 @@ class _KubernetesDecoratedOperator(DecoratedOperator, KubernetesPodOperator): # there are some cases we can't deepcopy the objects (e.g protobuf). 
shallow_copy_attrs: Sequence[str] = ("python_callable",) - def __init__(self, namespace: str = "default", use_dill: bool = False, **kwargs) -> None: + def __init__(self, namespace: str | None = None, use_dill: bool = False, **kwargs) -> None: self.use_dill = use_dill super().__init__( namespace=namespace, diff --git a/providers/src/airflow/providers/cncf/kubernetes/provider.yaml b/providers/src/airflow/providers/cncf/kubernetes/provider.yaml index a6f4b266b6ff..79c885ffb014 100644 --- a/providers/src/airflow/providers/cncf/kubernetes/provider.yaml +++ b/providers/src/airflow/providers/cncf/kubernetes/provider.yaml @@ -25,6 +25,7 @@ state: ready source-date-epoch: 1730012271 # note that those versions are maintained by release manager - do not update them manually versions: + - 10.0.0 - 9.0.1 - 9.0.0 - 8.4.2 diff --git a/providers/tests/cncf/kubernetes/decorators/test_kubernetes.py b/providers/tests/cncf/kubernetes/decorators/test_kubernetes.py index 93198cbe0a7e..9545e9adbf3d 100644 --- a/providers/tests/cncf/kubernetes/decorators/test_kubernetes.py +++ b/providers/tests/cncf/kubernetes/decorators/test_kubernetes.py @@ -78,6 +78,7 @@ def test_basic_kubernetes(dag_maker, session, mock_create_pod: mock.Mock, mock_h in_cluster=False, cluster_context="default", config_file="/tmp/fake_file", + namespace="default", ) def f(): import random @@ -122,6 +123,7 @@ def test_kubernetes_with_input_output( in_cluster=False, cluster_context="default", config_file="/tmp/fake_file", + namespace="default", ) def f(arg1, arg2, kwarg1=None, kwarg2=None): return {"key1": "value1", "key2": "value2"} From 3939d13224af28c855f4f79be53e0cae1c48026e Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Tue, 5 Nov 2024 13:23:23 +0000 Subject: [PATCH 037/137] AIP-72: Add "update TI state" endpoint for Execution API (#43602) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Part of https://github.com/apache/airflow/issues/43586 This PR adds a new endpoint `/execution/{task_instance_id}/state` that will allow Updating the State of the TI from the worker. Some of the interesting changes / TILs were: (hat tip to @ashb for this) To streamline the data exchange between workers and the Task Execution API, this PR adds minified schemas for Task Instance updates i.e. focuses solely on the fields necessary for specific state transitions, reducing payload size and validations. Since our TaskInstance model is huge this also keeps it clean to focus on only those fields that matter for this case. The endpoint added in this PR also leverages Pydantic’s [discriminated unions](https://docs.pydantic.dev/latest/concepts/unions/#discriminated-unions) to handle varying payload structures for each target state. This allows a single endpoint to receive different payloads (with different validations). For example: - `TIEnterRunningPayload`: Requires fields such as hostname, unixname, pid, and start_date to mark a task as RUNNING. - `TITerminalStatePayload`: Supports terminal states like SUCCESS, FAILED, SKIPPED, - `TITargetStatePayload`: Allows for other non-terminal, non-running states that a task may transition to. This is better so we don't have invalid payloads for example adding a start_date when a task is marked as SUCCESS, it doesn't make sense and it might be an error from the client! ![Nov-04-2024 20-00-26](https://github.com/user-attachments/assets/07c1a197-0238-4c1a-9783-f23dd74a8d3e) `fastapi` allows importing a handy `status` module from starlette which has status code and the reason in its name. 
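As a toy illustration only (this is not the actual Airflow route, and the model and field names below are made up), the two ideas above — per-state payload schemas selected through a union, and the named `status` constants — look roughly like this:

```python
from typing import Literal, Union

from fastapi import FastAPI, HTTPException, status
from pydantic import BaseModel

app = FastAPI()


class EnterRunning(BaseModel):
    state: Literal["running"]
    hostname: str
    pid: int


class Finished(BaseModel):
    state: Literal["success", "failed", "skipped"]
    end_date: str


@app.patch("/toy/{ti_id}/state", status_code=status.HTTP_204_NO_CONTENT)
def update_state(ti_id: str, payload: Union[EnterRunning, Finished]) -> None:
    # The Literal "state" field lets pydantic pick the right schema and reject
    # payloads that mix fields from both shapes (e.g. end_date on a running update).
    if isinstance(payload, EnterRunning) and ti_id == "not-queued":
        # status.HTTP_409_CONFLICT reads far better at a glance than a bare 409.
        raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail="invalid state transition")
```

(Run it with e.g. `uvicorn sketch:app` and PATCH a JSON body such as `{"state": "success", "end_date": "..."}` to see the per-state validation in action.)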
Reference: https://fastapi.tiangolo.com/reference/status/ Example: `status.HTTP_204_NO_CONTENT` and `status.HTTP_409_CONFLICT` explain a lot more than just a "204 code" which doesn't tell much. I plan to change our current integers on public API to these in coming days. For now, I have assumed that we/the user don't care about `end_date` for `REMOVED` & `UPSTREAM_FAILED` status since they should be handled by the scheduler and shouldn't even show up on the worker. For `SKIPPED` state, since there are 2 scenarios: 1) A user can run the task and raise a `AirflowSkipException` 2) a task skipped on scheduler itself! For (1), we could set an end date, but (2) doesn't have it. - [ ] Pass a [RFC 9457](https://datatracker.ietf.org/doc/html/rfc9457) compliant error message in "detail" field of `HTTPException` to provide more information about the error - [ ] Add a separate heartbeat endpoint to track the TI’s active state. - [ ] Replace handling of `SQLAlchemyError` with FastAPI's [Custom Exception handling](https://fastapi.tiangolo.com/tutorial/handling-errors/#install-custom-exception-handlers) across the Execution API endpoints. That way we don't need duplicate code across multiple endpoints. - [ ] Replace `None` state on TaskInstance with a `Created` state. ([link](https://github.com/orgs/apache/projects/405/views/1?pane=issue&itemId=85900878)) - [ ] Remove redundant code that also set's task type once we remove DB access from the worker. This is assuming that the Webserver or the new FastAPI endpoints don't use this endpoint. --- airflow/api_fastapi/common/types.py | 25 +++ airflow/api_fastapi/execution_api/app.py | 1 + .../execution_api/routes/__init__.py | 2 + .../execution_api/routes/health.py | 2 +- .../execution_api/routes/task_instance.py | 131 ++++++++++++ airflow/api_fastapi/execution_api/schemas.py | 114 ++++++++++ airflow/models/taskinstance.py | 36 ++++ airflow/utils/state.py | 9 + tests/api_fastapi/execution_api/conftest.py | 27 +++ .../routes/test_task_instance.py | 194 ++++++++++++++++++ 10 files changed, 540 insertions(+), 1 deletion(-) create mode 100644 airflow/api_fastapi/common/types.py create mode 100644 airflow/api_fastapi/execution_api/routes/task_instance.py create mode 100644 airflow/api_fastapi/execution_api/schemas.py create mode 100644 tests/api_fastapi/execution_api/conftest.py create mode 100644 tests/api_fastapi/execution_api/routes/test_task_instance.py diff --git a/airflow/api_fastapi/common/types.py b/airflow/api_fastapi/common/types.py new file mode 100644 index 000000000000..d9664c072213 --- /dev/null +++ b/airflow/api_fastapi/common/types.py @@ -0,0 +1,25 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +from pydantic import AfterValidator, AwareDatetime +from typing_extensions import Annotated + +from airflow.utils import timezone + +UtcDateTime = Annotated[AwareDatetime, AfterValidator(lambda d: d.astimezone(timezone.utc))] +"""UTCDateTime is a datetime with timezone information""" diff --git a/airflow/api_fastapi/execution_api/app.py b/airflow/api_fastapi/execution_api/app.py index 771b81c43ae0..8f4cd3fd0a89 100644 --- a/airflow/api_fastapi/execution_api/app.py +++ b/airflow/api_fastapi/execution_api/app.py @@ -24,6 +24,7 @@ def create_task_execution_api_app(app: FastAPI) -> FastAPI: """Create FastAPI app for task execution API.""" from airflow.api_fastapi.execution_api.routes import execution_api_router + # TODO: Add versioning to the API task_exec_api_app = FastAPI( title="Airflow Task Execution API", description="The private Airflow Task Execution API.", diff --git a/airflow/api_fastapi/execution_api/routes/__init__.py b/airflow/api_fastapi/execution_api/routes/__init__.py index 3d8761caefbc..55ee56b6168f 100644 --- a/airflow/api_fastapi/execution_api/routes/__init__.py +++ b/airflow/api_fastapi/execution_api/routes/__init__.py @@ -18,6 +18,8 @@ from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.execution_api.routes.health import health_router +from airflow.api_fastapi.execution_api.routes.task_instance import ti_router execution_api_router = AirflowRouter() execution_api_router.include_router(health_router) +execution_api_router.include_router(ti_router) diff --git a/airflow/api_fastapi/execution_api/routes/health.py b/airflow/api_fastapi/execution_api/routes/health.py index 21ef586b8c76..e0d51e3c7145 100644 --- a/airflow/api_fastapi/execution_api/routes/health.py +++ b/airflow/api_fastapi/execution_api/routes/health.py @@ -19,7 +19,7 @@ from airflow.api_fastapi.common.router import AirflowRouter -health_router = AirflowRouter(tags=["Task SDK"]) +health_router = AirflowRouter(tags=["Health"]) @health_router.get("/health") diff --git a/airflow/api_fastapi/execution_api/routes/task_instance.py b/airflow/api_fastapi/execution_api/routes/task_instance.py new file mode 100644 index 000000000000..05ce18496409 --- /dev/null +++ b/airflow/api_fastapi/execution_api/routes/task_instance.py @@ -0,0 +1,131 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from __future__ import annotations + +import logging +from uuid import UUID + +from fastapi import Body, Depends, HTTPException, status +from sqlalchemy import update +from sqlalchemy.exc import NoResultFound, SQLAlchemyError +from sqlalchemy.orm import Session +from sqlalchemy.sql import select +from typing_extensions import Annotated + +from airflow.api_fastapi.common.db.common import get_session +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.execution_api import schemas +from airflow.models.taskinstance import TaskInstance as TI +from airflow.utils.state import State + +# TODO: Add dependency on JWT token +ti_router = AirflowRouter( + prefix="/task_instance", + tags=["Task Instance"], +) + + +log = logging.getLogger(__name__) + + +@ti_router.patch( + "/{task_instance_id}/state", + status_code=status.HTTP_204_NO_CONTENT, + # TODO: Add Operation ID to control the function name in the OpenAPI spec + # TODO: Do we need to use create_openapi_http_exception_doc here? + responses={ + status.HTTP_404_NOT_FOUND: {"description": "Task Instance not found"}, + status.HTTP_409_CONFLICT: {"description": "The TI is already in the requested state"}, + status.HTTP_422_UNPROCESSABLE_ENTITY: {"description": "Invalid payload for the state transition"}, + }, +) +async def ti_update_state( + task_instance_id: UUID, + ti_patch_payload: Annotated[schemas.TIStateUpdate, Body()], + session: Annotated[Session, Depends(get_session)], +): + """ + Update the state of a TaskInstance. + + Not all state transitions are valid, and transitioning to some states required extra information to be + passed along. (Check our the schemas for details, the rendered docs might not reflect this accurately) + """ + # We only use UUID above for validation purposes + ti_id_str = str(task_instance_id) + + old = select(TI.state).where(TI.id == ti_id_str).with_for_update() + try: + (previous_state,) = session.execute(old).one() + except NoResultFound: + log.error("Task Instance %s not found", ti_id_str) + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail={ + "reason": "not_found", + "message": "Task Instance not found", + }, + ) + + # We exclude_unset to avoid updating fields that are not set in the payload + data = ti_patch_payload.model_dump(exclude_unset=True) + + query = update(TI).where(TI.id == ti_id_str).values(data) + + if isinstance(ti_patch_payload, schemas.TIEnterRunningPayload): + if previous_state != State.QUEUED: + log.warning( + "Can not start Task Instance ('%s') in invalid state: %s", + ti_id_str, + previous_state, + ) + + # TODO: Pass a RFC 9457 compliant error message in "detail" field + # https://datatracker.ietf.org/doc/html/rfc9457 + # to provide more information about the error + # FastAPI will automatically convert this to a JSON response + # This might be added in FastAPI in https://github.com/fastapi/fastapi/issues/10370 + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail={ + "reason": "invalid_state", + "message": "TI was not in a state where it could be marked as running", + "previous_state": previous_state, + }, + ) + log.info("Task with %s state started on %s ", previous_state, ti_patch_payload.hostname) + # Ensure there is no end date set. 
+ query = query.values( + end_date=None, + hostname=ti_patch_payload.hostname, + unixname=ti_patch_payload.unixname, + pid=ti_patch_payload.pid, + state=State.RUNNING, + ) + elif isinstance(ti_patch_payload, schemas.TITerminalStatePayload): + query = TI.duration_expression_update(ti_patch_payload.end_date, query, session.bind) + + # TODO: Replace this with FastAPI's Custom Exception handling: + # https://fastapi.tiangolo.com/tutorial/handling-errors/#install-custom-exception-handlers + try: + result = session.execute(query) + log.info("TI %s state updated: %s row(s) affected", ti_id_str, result.rowcount) + except SQLAlchemyError as e: + log.error("Error updating Task Instance state: %s", e) + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Database error occurred" + ) diff --git a/airflow/api_fastapi/execution_api/schemas.py b/airflow/api_fastapi/execution_api/schemas.py new file mode 100644 index 000000000000..3b60b109d9ca --- /dev/null +++ b/airflow/api_fastapi/execution_api/schemas.py @@ -0,0 +1,114 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +from typing import Annotated, Literal, Union + +from pydantic import ( + BaseModel, + ConfigDict, + Discriminator, + Field, + Tag, + WithJsonSchema, +) + +from airflow.api_fastapi.common.types import UtcDateTime +from airflow.utils.state import State, TaskInstanceState as TIState + + +class TIEnterRunningPayload(BaseModel): + """Schema for updating TaskInstance to 'RUNNING' state with minimal required fields.""" + + model_config = ConfigDict(from_attributes=True) + + state: Annotated[ + Literal[TIState.RUNNING], + # Specify a default in the schema, but not in code, so Pydantic marks it as required. 
+ WithJsonSchema({"enum": [TIState.RUNNING], "default": TIState.RUNNING}), + ] + hostname: str + """Hostname where this task has started""" + unixname: str + """Local username of the process where this task has started""" + pid: int + """Process Identifier on `hostname`""" + start_date: UtcDateTime + """When the task started executing""" + + +class TITerminalStatePayload(BaseModel): + """Schema for updating TaskInstance to a terminal state (e.g., SUCCESS or FAILED).""" + + state: Annotated[ + Literal[TIState.SUCCESS, TIState.FAILED, TIState.SKIPPED], + Field(title="TerminalState"), + WithJsonSchema({"enum": list(State.ran_and_finished_states)}), + ] + + end_date: UtcDateTime + """When the task completed executing""" + + +class TITargetStatePayload(BaseModel): + """Schema for updating TaskInstance to a target state, excluding terminal and running states.""" + + state: Annotated[ + TIState, + # For the OpenAPI schema generation, + # make sure we do not include RUNNING as a valid state here + WithJsonSchema( + { + "enum": [ + state for state in TIState if state not in (State.ran_and_finished_states | {State.NONE}) + ] + } + ), + ] + + +def ti_state_discriminator(v: dict[str, str] | BaseModel) -> str: + """ + Determine the discriminator key for TaskInstance state transitions. + + This function serves as a discriminator for the TIStateUpdate union schema, + categorizing the payload based on the ``state`` attribute in the input data. + It returns a key that directs FastAPI to the appropriate subclass (schema) + based on the requested state. + """ + if isinstance(v, dict): + state = v.get("state") + else: + state = getattr(v, "state", None) + if state == TIState.RUNNING: + return str(state) + elif state in State.ran_and_finished_states: + return "_terminal_" + return "_other_" + + +# It is called "_terminal_" to avoid future conflicts if we added an actual state named "terminal" +# and "_other_" is a catch-all for all other states that are not covered by the other schemas. 
+TIStateUpdate = Annotated[ + Union[ + Annotated[TIEnterRunningPayload, Tag("running")], + Annotated[TITerminalStatePayload, Tag("_terminal_")], + Annotated[TITargetStatePayload, Tag("_other_")], + ], + Discriminator(ti_state_discriminator), +] diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index dfd776e685a0..c525a40a14ab 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -54,6 +54,7 @@ UniqueConstraint, and_, delete, + extract, false, func, inspect, @@ -151,7 +152,9 @@ from pathlib import PurePath from types import TracebackType + from sqlalchemy.engine import Connection as SAConnection, Engine from sqlalchemy.orm.session import Session + from sqlalchemy.sql import Update from sqlalchemy.sql.elements import BooleanClauseList from sqlalchemy.sql.expression import ColumnOperators @@ -3843,6 +3846,39 @@ def clear_db_references(self, session: Session): ) ) + @classmethod + def duration_expression_update( + cls, end_date: datetime, query: Update, bind: Engine | SAConnection + ) -> Update: + """Return a SQL expression for calculating the duration of this TI, based on the start and end date columns.""" + # TODO: Compare it with self._set_duration method + + if bind.dialect.name == "sqlite": + return query.values( + { + "end_date": end_date, + "duration": (func.julianday(end_date) - func.julianday(cls.start_date)) * 86400, + } + ) + elif bind.dialect.name == "postgresql": + return query.values( + { + "end_date": end_date, + "duration": extract("EPOCH", end_date - cls.start_date), + } + ) + + return query.values( + { + "end_date": end_date, + "duration": ( + func.timestampdiff(text("MICROSECOND"), cls.start_date, end_date) + # Turn microseconds into floating point seconds. + / 1_000_000 + ), + } + ) + def _find_common_ancestor_mapped_group(node1: Operator, node2: Operator) -> MappedTaskGroup | None: """Given two operators, find their innermost common mapped task group.""" diff --git a/airflow/utils/state.py b/airflow/utils/state.py index 87ce20effc07..246c157611bd 100644 --- a/airflow/utils/state.py +++ b/airflow/utils/state.py @@ -199,3 +199,12 @@ def color_fg(cls, state): A list of states indicating that a task can be adopted or reset by a scheduler job if it was queued by another scheduler job that is not running anymore. """ + + ran_and_finished_states = frozenset( + [TaskInstanceState.SUCCESS, TaskInstanceState.FAILED, TaskInstanceState.SKIPPED] + ) + """ + A list of states indicating that a task has run and finished. This excludes states like + removed and upstream_failed. Skipped is included because a user can raise a + AirflowSkipException in a task and it will be marked as skipped. + """ diff --git a/tests/api_fastapi/execution_api/conftest.py b/tests/api_fastapi/execution_api/conftest.py new file mode 100644 index 000000000000..784cb29249a6 --- /dev/null +++ b/tests/api_fastapi/execution_api/conftest.py @@ -0,0 +1,27 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import pytest +from fastapi.testclient import TestClient + +from airflow.api_fastapi.app import cached_app + + +@pytest.fixture +def client(): + return TestClient(cached_app(apps="execution")) diff --git a/tests/api_fastapi/execution_api/routes/test_task_instance.py b/tests/api_fastapi/execution_api/routes/test_task_instance.py new file mode 100644 index 000000000000..602ed1fbd25a --- /dev/null +++ b/tests/api_fastapi/execution_api/routes/test_task_instance.py @@ -0,0 +1,194 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +from unittest import mock + +import pytest +from sqlalchemy import select +from sqlalchemy.exc import SQLAlchemyError + +from airflow.models.taskinstance import TaskInstance +from airflow.utils import timezone +from airflow.utils.state import State + +from tests_common.test_utils.db import clear_db_runs + +pytestmark = pytest.mark.db_test + + +DEFAULT_START_DATE = timezone.parse("2024-10-31T11:00:00Z") +DEFAULT_END_DATE = timezone.parse("2024-10-31T12:00:00Z") + + +class TestTIUpdateState: + def setup_method(self): + clear_db_runs() + + def teardown_method(self): + clear_db_runs() + + def test_ti_update_state_to_running(self, client, session, create_task_instance): + """ + Test that the Task Instance state is updated to running when the Task Instance is in a state where it can be + marked as running. 
+ """ + + ti = create_task_instance( + task_id="test_ti_update_state_to_running", + state=State.QUEUED, + session=session, + ) + + session.commit() + + response = client.patch( + f"/execution/task_instance/{ti.id}/state", + json={ + "state": "running", + "hostname": "random-hostname", + "unixname": "random-unixname", + "pid": 100, + "start_date": "2024-10-31T12:00:00Z", + }, + ) + + assert response.status_code == 204 + assert response.text == "" + + # Refresh the Task Instance from the database so that we can check the updated values + session.refresh(ti) + assert ti.state == State.RUNNING + assert ti.hostname == "random-hostname" + assert ti.unixname == "random-unixname" + assert ti.pid == 100 + assert ti.start_date.isoformat() == "2024-10-31T12:00:00+00:00" + + def test_ti_update_state_conflict_if_not_queued(self, client, session, create_task_instance): + """ + Test that a 409 error is returned when the Task Instance is not in a state where it can be marked as + running. In this case, the Task Instance is first in NONE state so it cannot be marked as running. + """ + ti = create_task_instance( + task_id="test_ti_update_state_conflict_if_not_queued", + state=State.NONE, + ) + session.commit() + + response = client.patch( + f"/execution/task_instance/{ti.id}/state", + json={ + "state": "running", + "hostname": "random-hostname", + "unixname": "random-unixname", + "pid": 100, + "start_date": "2024-10-31T12:00:00Z", + }, + ) + + assert response.status_code == 409 + assert response.json() == { + "detail": { + "message": "TI was not in a state where it could be marked as running", + "previous_state": State.NONE, + "reason": "invalid_state", + } + } + + assert session.scalar(select(TaskInstance.state).where(TaskInstance.id == ti.id)) == State.NONE + + @pytest.mark.parametrize( + ("state", "end_date", "expected_state"), + [ + (State.SUCCESS, DEFAULT_END_DATE, State.SUCCESS), + (State.FAILED, DEFAULT_END_DATE, State.FAILED), + (State.SKIPPED, DEFAULT_END_DATE, State.SKIPPED), + ], + ) + def test_ti_update_state_to_terminal( + self, client, session, create_task_instance, state, end_date, expected_state + ): + ti = create_task_instance( + task_id="test_ti_update_state_to_terminal", + start_date=DEFAULT_START_DATE, + state=State.RUNNING, + ) + session.commit() + + response = client.patch( + f"/execution/task_instance/{ti.id}/state", + json={ + "state": state, + "end_date": end_date.isoformat(), + }, + ) + + assert response.status_code == 204 + assert response.text == "" + + session.expire_all() + + ti = session.get(TaskInstance, ti.id) + assert ti.state == expected_state + assert ti.end_date == end_date + + def test_ti_update_state_not_found(self, client, session): + """ + Test that a 404 error is returned when the Task Instance does not exist. + """ + task_instance_id = "0182e924-0f1e-77e6-ab50-e977118bc139" + + # Pre-condition: the Task Instance does not exist + assert session.scalar(select(TaskInstance.id).where(TaskInstance.id == task_instance_id)) is None + + payload = {"state": "success", "end_date": "2024-10-31T12:30:00Z"} + + response = client.patch(f"/execution/task_instance/{task_instance_id}/state", json=payload) + assert response.status_code == 404 + assert response.json()["detail"] == { + "reason": "not_found", + "message": "Task Instance not found", + } + + def test_ti_update_state_database_error(self, client, session, create_task_instance): + """ + Test that a database error is handled correctly when updating the Task Instance state. 
+ """ + ti = create_task_instance( + task_id="test_ti_update_state_database_error", + state=State.QUEUED, + ) + session.commit() + payload = { + "state": "running", + "hostname": "random-hostname", + "unixname": "random-unixname", + "pid": 100, + "start_date": "2024-10-31T12:00:00Z", + } + + with mock.patch( + "airflow.api_fastapi.execution_api.routes.task_instance.Session.execute", + side_effect=[ + mock.Mock(one=lambda: ("queued",)), # First call returns "queued" + SQLAlchemyError("Database error"), # Second call raises an error + ], + ): + response = client.patch(f"/execution/task_instance/{ti.id}/state", json=payload) + assert response.status_code == 500 + assert response.json()["detail"] == "Database error occurred" From da502427b7204fac5f34ecf138aada37530258c2 Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Tue, 5 Nov 2024 14:08:25 +0000 Subject: [PATCH 038/137] Bump `uv` to `0.4.30` (#43692) https://pypi.org/project/uv/0.4.30/ --- .github/actions/install-pre-commit/action.yml | 2 +- Dockerfile | 2 +- Dockerfile.ci | 2 +- dev/breeze/doc/ci/02_images.md | 2 +- .../commands/release_management_commands.py | 2 +- .../src/airflow_breeze/global_constants.py | 2 +- dev/breeze/uv.lock | 42 +++++++++---------- 7 files changed, 27 insertions(+), 27 deletions(-) diff --git a/.github/actions/install-pre-commit/action.yml b/.github/actions/install-pre-commit/action.yml index 02eea2c72291..aa1dee87aa5f 100644 --- a/.github/actions/install-pre-commit/action.yml +++ b/.github/actions/install-pre-commit/action.yml @@ -24,7 +24,7 @@ inputs: default: 3.9 uv-version: description: 'uv version to use' - default: 0.4.29 + default: 0.4.30 pre-commit-version: description: 'pre-commit version to use' default: 4.0.1 diff --git a/Dockerfile b/Dockerfile index 25ceb8630776..482e45c73c1c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -55,7 +55,7 @@ ARG PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" # Also use `force pip` label on your PR to swap all places we use `uv` to `pip` ARG AIRFLOW_PIP_VERSION=24.3.1 # ARG AIRFLOW_PIP_VERSION="git+https://github.com/pypa/pip.git@main" -ARG AIRFLOW_UV_VERSION=0.4.29 +ARG AIRFLOW_UV_VERSION=0.4.30 ARG AIRFLOW_USE_UV="false" ARG UV_HTTP_TIMEOUT="300" ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow" diff --git a/Dockerfile.ci b/Dockerfile.ci index 6ddf2f4e1ac4..3dce849b61a5 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -1373,7 +1373,7 @@ RUN bash /scripts/docker/install_packaging_tools.sh; \ # Also use `force pip` label on your PR to swap all places we use `uv` to `pip` ARG AIRFLOW_PIP_VERSION=24.3.1 # ARG AIRFLOW_PIP_VERSION="git+https://github.com/pypa/pip.git@main" -ARG AIRFLOW_UV_VERSION=0.4.29 +ARG AIRFLOW_UV_VERSION=0.4.30 ENV AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \ AIRFLOW_UV_VERSION=${AIRFLOW_UV_VERSION} diff --git a/dev/breeze/doc/ci/02_images.md b/dev/breeze/doc/ci/02_images.md index 66f7b6121230..de1165aec6c2 100644 --- a/dev/breeze/doc/ci/02_images.md +++ b/dev/breeze/doc/ci/02_images.md @@ -448,7 +448,7 @@ can be used for CI images: | `ADDITIONAL_DEV_APT_DEPS` | | Additional apt dev dependencies installed in the first part of the image | | `ADDITIONAL_DEV_APT_ENV` | | Additional env variables defined when installing dev deps | | `AIRFLOW_PIP_VERSION` | `24.3.1` | PIP version used. | -| `AIRFLOW_UV_VERSION` | `0.4.29` | UV version used. | +| `AIRFLOW_UV_VERSION` | `0.4.30` | UV version used. | | `AIRFLOW_USE_UV` | `true` | Whether to use UV for installation. 
| | `PIP_PROGRESS_BAR` | `on` | Progress bar for PIP installation | diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py index 6955d38e0eb7..b7e8c1baba19 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py @@ -230,7 +230,7 @@ class VersionedFile(NamedTuple): AIRFLOW_PIP_VERSION = "24.3.1" -AIRFLOW_UV_VERSION = "0.4.29" +AIRFLOW_UV_VERSION = "0.4.30" AIRFLOW_USE_UV = False # TODO: automate thsese as well WHEEL_VERSION = "0.44.0" diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py index a674b142b3c3..03b0316564ef 100644 --- a/dev/breeze/src/airflow_breeze/global_constants.py +++ b/dev/breeze/src/airflow_breeze/global_constants.py @@ -184,7 +184,7 @@ ALLOWED_INSTALL_MYSQL_CLIENT_TYPES = ["mariadb", "mysql"] PIP_VERSION = "24.3.1" -UV_VERSION = "0.4.29" +UV_VERSION = "0.4.30" DEFAULT_UV_HTTP_TIMEOUT = 300 DEFAULT_WSL2_HTTP_TIMEOUT = 900 diff --git a/dev/breeze/uv.lock b/dev/breeze/uv.lock index 0aed5ae9cd12..d1095b2bf9f6 100644 --- a/dev/breeze/uv.lock +++ b/dev/breeze/uv.lock @@ -1717,27 +1717,27 @@ wheels = [ [[package]] name = "uv" -version = "0.4.29" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6a/23/6e8d8177112b40d4905a49c03d397c5b93eb030f87cdddf0c5d4be599fc9/uv-0.4.29.tar.gz", hash = "sha256:9c559b6fdc042add463e86afa1c210716f7020bfc2e96b00df5af7afcb587ce7", size = 2102901 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1c/8d/78b6927a3e511a4bc05347714c8917896477537bf09a6301e84de08b7a59/uv-0.4.29-py3-none-linux_armv6l.whl", hash = "sha256:287dc3fd3f78093a5a82136f01cbd9f224e0905b38d3dcffdc96c08fbbe48ee9", size = 13250618 }, - { url = "https://files.pythonhosted.org/packages/d8/2f/1bbfc3c15933fcf07c222e063044696320f5a9fe3d5c584960ed0c490cf8/uv-0.4.29-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6b03859068aaa08ca9907a51d403d54b0a9d8054091646845a9192f213f099d4", size = 13316211 }, - { url = "https://files.pythonhosted.org/packages/fb/1a/1c862cc36f29cf58b22758f31eb5f9611ee86429d470c8e4c0fd235592ec/uv-0.4.29-py3-none-macosx_11_0_arm64.whl", hash = "sha256:950bbfe1954e9c3a5d6c4777bb778b4c23d0dea9ad9f77622c45d4fbba433355", size = 12363705 }, - { url = "https://files.pythonhosted.org/packages/a1/0e/76e947db1135fa2436b11cc1ca927de187601be7ec65b0102f42a6a58211/uv-0.4.29-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:3473b05142ba436ac30d036b7ab5e9bcfa97f63df5d1382f92e0a3e4aaa391bc", size = 12622825 }, - { url = "https://files.pythonhosted.org/packages/41/3d/b54226b11eb935e4e57585905cf3ded2ac7d972c551bef1c3a000d4c5e47/uv-0.4.29-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7060dfbad0bc26e9cecbb4f8482445c958071511f23728948478f81acfb29048", size = 13054445 }, - { url = "https://files.pythonhosted.org/packages/bf/00/02fa712a3991957d2a65d043173d06d3a429acb3c4e54976f4385c034d97/uv-0.4.29-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df35d9cbe4cfbb7bce287f56e3bb7a7cef0b7b5173ed889d936d4c470f2b1b83", size = 13655646 }, - { url = "https://files.pythonhosted.org/packages/61/85/f6796032396bbd350648747c984376c8c8add14c75476ed8d5a3438a9c76/uv-0.4.29-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = 
"sha256:cfb797a87b55d96cc0593e9f29ab5d58454be74598ea0158e1b2f4f2dc97cede", size = 14281147 }, - { url = "https://files.pythonhosted.org/packages/17/48/3314a03c6580d0b05bd1b9122ff9a9fbde5163815cf84f5a220fc013cea1/uv-0.4.29-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:668d3e6095c6f0cac6a831ef4030f7ad79442d1c84b9569f01f50b60c2d51a77", size = 14004714 }, - { url = "https://files.pythonhosted.org/packages/11/e0/456bc5271f09ff385c57570628705757a59f9a3f8205ff029dc9b2213dbd/uv-0.4.29-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0be21afa0e582ddc5badff6ef40c3c6784efc5feae4ad568307b668d40dc49bd", size = 18032241 }, - { url = "https://files.pythonhosted.org/packages/ef/6c/db10ff7f178ee93a832941e1cddbf38bfb1b0e30fd07580db10eb909f19d/uv-0.4.29-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6224a322267570e0470c61008fd1c8e2f50bf073b339f4c3010da86aef3c44c", size = 13787528 }, - { url = "https://files.pythonhosted.org/packages/1b/cf/501cd6aeeae0413e83ed0c112a362e44c05fa01144ecfd05c6fb3533778d/uv-0.4.29-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:24cccff9c248864ba0ab3429bae56314146c9494ce66a881d70ea8cf2805945f", size = 12789635 }, - { url = "https://files.pythonhosted.org/packages/8d/8d/3103af713c6369b6c1afe2bd8415eb43ea2cd4d11aa823f2e5747736b410/uv-0.4.29-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:68d4967b5f0af8bd46085e0f3ded229026700668a97734a21c3d11a5fc350c47", size = 13022589 }, - { url = "https://files.pythonhosted.org/packages/4f/4d/e9a0da7c43301f27503ed0af881afb9059e3700bd374d1c7c6579ff9fb29/uv-0.4.29-py3-none-musllinux_1_1_i686.whl", hash = "sha256:75927da78f74bb935314d236dc61ecdc192e878e06eb79585b6d9d5ee9829f98", size = 13367805 }, - { url = "https://files.pythonhosted.org/packages/be/70/a78cd7cdac7581cf0a7e027cf3c69d07ca5b6b83d39f571411cc73f1590f/uv-0.4.29-py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:246da468ac0d51e7fb257cd038db2f8d6376ae269a44d01f56776e32108aa9da", size = 15158094 }, - { url = "https://files.pythonhosted.org/packages/e6/93/3bcb18a54a9823c8bfadd362022b1c480da10c0bcd86398101f9a124e0a7/uv-0.4.29-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:8c71663c7df4f512c697de39a4926dc191897f5fede73644bb2329f532c1ebfa", size = 13917229 }, - { url = "https://files.pythonhosted.org/packages/8a/38/bd90e265f868ddbc2dd3cc9625e2d8670d3ac35984a078491be11be754f3/uv-0.4.29-py3-none-win32.whl", hash = "sha256:b5775db128b98251c3ea7874367fc20dce9f9aac3dbfa635e3ef4a1c56842d9c", size = 13203439 }, - { url = "https://files.pythonhosted.org/packages/cb/4f/446a0fe5901b110093f3888e93c8ebee1b08f35ba1699bbaf3645b553865/uv-0.4.29-py3-none-win_amd64.whl", hash = "sha256:67dcfd253020e25ed1c49e5bd06406205c37264f99e14002de53a357cd1cdadf", size = 14902665 }, +version = "0.4.30" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/66/8191736201d0b503f75cc5682e5d1a47e0e4fe55f5616605af8727e2c9de/uv-0.4.30.tar.gz", hash = "sha256:d9de718380e2f167243ca5e1dccea781e06404158442491255fec5955d57fed9", size = 2126167 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/67/f8eefd7499740fc5c2764574ad2d577a50d925c506e74cd0557c2d64f05b/uv-0.4.30-py3-none-linux_armv6l.whl", hash = "sha256:4ddad09385221fa5c609169e4a0dd5bee27cf56c1dc450d4cdc113122c54bb09", size = 13447487 }, + { url = "https://files.pythonhosted.org/packages/bb/07/9e8f09a4f93fd3cda20e635392994bf15c79ec5c853b5d3fe001b8259ef6/uv-0.4.30-py3-none-macosx_10_12_x86_64.whl", hash = 
"sha256:f63d6646acdf2f38a5afca9fb9eeac62efa663a57f3c134f735a5f575b4e748f", size = 13478492 }, + { url = "https://files.pythonhosted.org/packages/67/37/8994c3d0be99851a21a6ee01bbf3cb35ddc4b202a2f6f4014098d5893660/uv-0.4.30-py3-none-macosx_11_0_arm64.whl", hash = "sha256:353617bfcf72e1eabade426d83fb86a69d11273d1612aabc3f4566d41c596c97", size = 12467039 }, + { url = "https://files.pythonhosted.org/packages/0a/bc/c5fc5ede7f073c850fe61d1b35d45d45936bd212a188a513e319d11e450c/uv-0.4.30-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:dedcae3619f0eb181459b597fefefd99cb21fe5a5a48a530be6f5ad934399bfb", size = 12740841 }, + { url = "https://files.pythonhosted.org/packages/a1/a7/a728622e0990ba8fe5188387c7a21218e605f00297c6466ecd4caff068e4/uv-0.4.30-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:232575f30ed971ea32d4a525b7146c4b088a07ed6e70a31da63792d563fcac44", size = 13257182 }, + { url = "https://files.pythonhosted.org/packages/53/08/eb5283f4fb758537f18d5dfbb0f8dae3198be9f091e7a66d016a6a8c0b5c/uv-0.4.30-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c89f2eff63a08d04e81629611f43b1ffa668af6de0382b95a71599af7d4b77c", size = 13817386 }, + { url = "https://files.pythonhosted.org/packages/43/28/b1b914c67807cd05d0e0ffe682d82335fa9d222ebd271553aa423b34b734/uv-0.4.30-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4d41d09cabba1988728c2d9b9ad25f79233c2aa3d6ecd724c36f4678c4c89711", size = 14417701 }, + { url = "https://files.pythonhosted.org/packages/0a/5d/fa1294dec14271be15affd420bdbba415dbc7e3db5b63719f8fb6d5cef34/uv-0.4.30-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ed0183e747065b9b1bcfb699ff10df671ebe6259709ce83e709f86cea564aee", size = 14163236 }, + { url = "https://files.pythonhosted.org/packages/ff/ce/e2fedbfcf055f79dd8c6e827d130bb8b9f2fd0841a6a0973baca8bdee242/uv-0.4.30-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e17a799c6279800996828e10288ca8ccc40cc883d8998802b938aa671dfa9ce", size = 18250185 }, + { url = "https://files.pythonhosted.org/packages/3b/36/592477b62bbd1d652ec2d45a5a6daba7ed5a6ce008690eb0749e18733adb/uv-0.4.30-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63196143f45018364c450ba94279a5bcff8562c14ba63deb41a92ed30baa6e22", size = 13953259 }, + { url = "https://files.pythonhosted.org/packages/f0/a1/4eb54d4b2809cb6b896881609d8620321f9907d052afee3111f72a50d16c/uv-0.4.30-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:6395820540f368f622e818735862abd633dfe7e729c450fca56b65bab4b46661", size = 12941390 }, + { url = "https://files.pythonhosted.org/packages/e4/68/e963aa4c235151f8f91442ffeb734642fa9d139630b5bcdb77719c84638f/uv-0.4.30-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:1a83df281c5d900b4758b1a3969b3cff57231f9027db8508b71dce1f2da78684", size = 13209967 }, + { url = "https://files.pythonhosted.org/packages/86/10/b72965bf44de9f31f5031efe9abad871b22c05884092314da4eb1233d0f0/uv-0.4.30-py3-none-musllinux_1_1_i686.whl", hash = "sha256:4aecd9fb39cf018e129627090a1d35af2b0184bb87078d573c9998f5e4072416", size = 13559034 }, + { url = "https://files.pythonhosted.org/packages/3a/58/2ed027ea9ae017d16a78f0b49e738f2df36ce67d2c1c836fcf442731170c/uv-0.4.30-py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:444468ad0e94b35cbf6acfc8a28589cfe1247136d43895e60a18955ff89a07ad", size = 15433457 }, + { url = 
"https://files.pythonhosted.org/packages/2b/db/b45b2d1470e39961e7d612f1f2ecd815de9b0fdd3298fbf14ef770863dbc/uv-0.4.30-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:ea55ca0fe5bdd04e46deaf395b3daf4fa92392f774e83610d066a2b272af5d3f", size = 14062977 }, + { url = "https://files.pythonhosted.org/packages/39/ee/1bac3464ae9c666c974a03e673a8cbb36023783a9c07de24d8a5e0473c4e/uv-0.4.30-py3-none-win32.whl", hash = "sha256:7f09bd6a853767863e2fb905f0eb1a0ed7afa9ea118852e5c02d2b451944e1cf", size = 13377566 }, + { url = "https://files.pythonhosted.org/packages/7b/05/3b42d33752cc0085369b4320e05ff667617de5a570be7cb358c6150ca046/uv-0.4.30-py3-none-win_amd64.whl", hash = "sha256:44c5aeb5b374f9fd1083959934daa9020db3610f0405198c5e3d8ec1f23d961d", size = 15022847 }, ] [[package]] From 1116f286eec3c233fe45fe66a3e4515c42501b75 Mon Sep 17 00:00:00 2001 From: Ephraim Anierobi Date: Tue, 5 Nov 2024 15:19:24 +0100 Subject: [PATCH 039/137] AIP-65: Add DAG versioning support (#42913) * AIP-65: Add DAG versioning support This commit introduces versioning for DAGs Changes: - Introduced DagVersion model to handle versioning of DAGs. - Added version_name field to DAG for use in tracking the dagversion by users - Added support for version retrieval in the get_dag_source API endpoint - Modified DAG execution logic to reference dag_version_id instead of the dag_hash to ensure DAG runs are linked to specific versions. Fix tests revert RESTAPI changes * fixup! AIP-65: Add DAG versioning support * fixup! fixup! AIP-65: Add DAG versioning support * fix migration * fix test * more test fixes * update query count * fix static checks * Fix query and add created_at to dag_version table * improve code * Change to using UUID for primary keys * DagCode.bulk_write_code is no longer used * fixup! Change to using UUID for primary keys * fix tests * fixup! fix tests * use uuid for version_name * fixup! use uuid for version_name * use row lock when writing dag version * use row lock when writing dag version * fixup! use row lock when writing dag version * deactivating dag should not remove serialized dags * save version_name as string not uuid * Make dag_version_id unique * fixup! Make dag_version_id unique * Fix tests * Use uuid7 * fix test * fixup! fix test * use binary=False for uuid field to fix sqlite issue * apply suggestions from code review * Remove unnecessary version_name on dagmodel * Fix sqlalchemy 2 warning * Fix conflicts * Apply suggestions from code review Co-authored-by: Jed Cunningham <66968678+jedcunningham@users.noreply.github.com> * fixup! Apply suggestions from code review * fixup! fixup! Apply suggestions from code review * add test for dagversion model and make version_name, number and dag_id unique * Remove commented test as serdag can no longer disappear * Add SQLAlchemy-utils to requirements * mark test_dag_version.py as db_test * make version_name nullable * Apply suggestions from code review * fixup! 
Apply suggestions from code review * remove file_updater * Use dag_version for creating dagruns instead of dag_version_id * fix conflicts * use if TYPE_CHECKING * Add docstrings to methods * Move getting latest serdags to SerializedDagModel --- airflow/api/common/trigger_dag.py | 5 +- .../endpoints/dag_run_endpoint.py | 5 +- airflow/dag_processing/manager.py | 15 - .../example_dags/plugins/event_listener.py | 5 +- airflow/jobs/scheduler_job_runner.py | 22 +- .../versions/0047_3_0_0_add_dag_versioning.py | 151 + airflow/models/__init__.py | 1 + airflow/models/backfill.py | 6 +- airflow/models/dag.py | 26 +- airflow/models/dag_version.py | 167 + airflow/models/dagbag.py | 3 +- airflow/models/dagcode.py | 117 +- airflow/models/dagrun.py | 22 +- airflow/models/serialized_dag.py | 141 +- airflow/models/taskinstance.py | 14 +- airflow/models/taskinstancehistory.py | 2 + airflow/serialization/pydantic/dag_run.py | 2 +- airflow/serialization/schema.json | 1 + airflow/utils/db.py | 2 +- airflow/www/views.py | 4 +- docs/apache-airflow/img/airflow_erd.sha256 | 2 +- docs/apache-airflow/img/airflow_erd.svg | 3942 +++++++++-------- docs/apache-airflow/migrations-ref.rst | 4 +- hatch_build.py | 1 + .../api_endpoints/test_dag_run_endpoint.py | 1 + .../pre_commit/check_ti_vs_tis_attributes.py | 1 + task_sdk/src/airflow/sdk/definitions/dag.py | 6 + .../endpoints/test_dag_run_endpoint.py | 5 +- .../endpoints/test_task_endpoint.py | 6 +- tests/cli/commands/test_task_command.py | 2 +- tests/dag_processing/test_job_runner.py | 24 +- tests/dag_processing/test_processor.py | 1 + tests/jobs/test_scheduler_job.py | 219 +- tests/models/test_dag.py | 24 +- tests/models/test_dag_version.py | 113 + tests/models/test_dagbag.py | 14 +- tests/models/test_dagcode.py | 110 +- tests/models/test_dagrun.py | 1 + tests/models/test_serialized_dag.py | 111 +- tests/models/test_taskinstance.py | 4 +- tests/operators/test_trigger_dagrun.py | 13 +- tests/sensors/test_external_task_sensor.py | 3 +- tests/utils/test_db_cleanup.py | 1 + tests/www/views/test_views_tasks.py | 36 +- tests_common/pytest_plugin.py | 59 +- tests_common/test_utils/db.py | 1 + 46 files changed, 3049 insertions(+), 2366 deletions(-) create mode 100644 airflow/migrations/versions/0047_3_0_0_add_dag_versioning.py create mode 100644 airflow/models/dag_version.py create mode 100644 tests/models/test_dag_version.py diff --git a/airflow/api/common/trigger_dag.py b/airflow/api/common/trigger_dag.py index b18957261f3a..44beae3f1f78 100644 --- a/airflow/api/common/trigger_dag.py +++ b/airflow/api/common/trigger_dag.py @@ -25,6 +25,7 @@ from airflow.api_internal.internal_api_call import internal_api_call from airflow.exceptions import DagNotFound, DagRunAlreadyExists from airflow.models import DagBag, DagModel, DagRun +from airflow.models.dag_version import DagVersion from airflow.utils import timezone from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.state import DagRunState @@ -92,14 +93,14 @@ def _trigger_dag( run_conf = None if conf: run_conf = conf if isinstance(conf, dict) else json.loads(conf) - + dag_version = DagVersion.get_latest_version(dag.dag_id) dag_run = dag.create_dagrun( run_id=run_id, execution_date=execution_date, state=DagRunState.QUEUED, conf=run_conf, external_trigger=True, - dag_hash=dag_bag.dags_hash.get(dag_id), + dag_version=dag_version, data_interval=data_interval, triggered_by=triggered_by, ) diff --git a/airflow/api_connexion/endpoints/dag_run_endpoint.py b/airflow/api_connexion/endpoints/dag_run_endpoint.py 
index 8ebb2b44e2bb..6a38eb27ff45 100644 --- a/airflow/api_connexion/endpoints/dag_run_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_run_endpoint.py @@ -61,6 +61,7 @@ from airflow.auth.managers.models.resource_details import DagAccessEntity from airflow.exceptions import ParamValidationError from airflow.models import DagModel, DagRun +from airflow.models.dag_version import DagVersion from airflow.timetables.base import DataInterval from airflow.utils.airflow_flask_app import get_airflow_app from airflow.utils.api_migration import mark_fastapi_migration_done @@ -341,7 +342,7 @@ def post_dag_run(*, dag_id: str, session: Session = NEW_SESSION) -> APIResponse: ) else: data_interval = dag.timetable.infer_manual_data_interval(run_after=logical_date) - + dag_version = DagVersion.get_latest_version(dag.dag_id) dag_run = dag.create_dagrun( run_type=DagRunType.MANUAL, run_id=run_id, @@ -350,7 +351,7 @@ def post_dag_run(*, dag_id: str, session: Session = NEW_SESSION) -> APIResponse: state=DagRunState.QUEUED, conf=post_body.get("conf"), external_trigger=True, - dag_hash=get_airflow_app().dag_bag.dags_hash.get(dag_id), + dag_version=dag_version, session=session, triggered_by=DagRunTriggeredByType.REST_API, ) diff --git a/airflow/dag_processing/manager.py b/airflow/dag_processing/manager.py index 0f3441a5d4d1..3bc467e2f706 100644 --- a/airflow/dag_processing/manager.py +++ b/airflow/dag_processing/manager.py @@ -50,7 +50,6 @@ from airflow.models.dagwarning import DagWarning from airflow.models.db_callback_request import DbCallbackRequest from airflow.models.errors import ParseImportError -from airflow.models.serialized_dag import SerializedDagModel from airflow.secrets.cache import SecretCache from airflow.stats import Stats from airflow.traces.tracer import Trace, add_span @@ -539,10 +538,6 @@ def deactivate_stale_dags( if deactivated: cls.logger().info("Deactivated %i DAGs which are no longer present in file.", deactivated) - for dag_id in to_deactivate: - SerializedDagModel.remove_dag(dag_id) - cls.logger().info("Deleted DAG %s in serialized_dag table", dag_id) - def _run_parsing_loop(self): # In sync mode we want timeout=None -- wait forever until a message is received if self._async_mode: @@ -819,20 +814,10 @@ def _iter_dag_filelocs(fileloc: str) -> Iterator[str]: dag_filelocs = {full_loc for path in self._file_paths for full_loc in _iter_dag_filelocs(path)} - from airflow.models.dagcode import DagCode - - SerializedDagModel.remove_deleted_dags( - alive_dag_filelocs=dag_filelocs, - processor_subdir=self.get_dag_directory(), - ) DagModel.deactivate_deleted_dags( dag_filelocs, processor_subdir=self.get_dag_directory(), ) - DagCode.remove_deleted_code( - dag_filelocs, - processor_subdir=self.get_dag_directory(), - ) return True return False diff --git a/airflow/example_dags/plugins/event_listener.py b/airflow/example_dags/plugins/event_listener.py index 4b9be307c4e7..6d9fe2ff1173 100644 --- a/airflow/example_dags/plugins/event_listener.py +++ b/airflow/example_dags/plugins/event_listener.py @@ -164,9 +164,10 @@ def on_dag_run_running(dag_run: DagRun, msg: str): """ print("Dag run in running state") queued_at = dag_run.queued_at - dag_hash_info = dag_run.dag_hash - print(f"Dag information Queued at: {queued_at} hash info: {dag_hash_info}") + version = dag_run.dag_version.version + + print(f"Dag information Queued at: {queued_at} version: {version}") # [END howto_listen_dagrun_running_task] diff --git a/airflow/jobs/scheduler_job_runner.py b/airflow/jobs/scheduler_job_runner.py index 
fb85a4a73cc3..ffa250fc8144 100644 --- a/airflow/jobs/scheduler_job_runner.py +++ b/airflow/jobs/scheduler_job_runner.py @@ -54,10 +54,10 @@ ) from airflow.models.backfill import Backfill from airflow.models.dag import DAG, DagModel +from airflow.models.dag_version import DagVersion from airflow.models.dagbag import DagBag from airflow.models.dagrun import DagRun from airflow.models.dagwarning import DagWarning, DagWarningType -from airflow.models.serialized_dag import SerializedDagModel from airflow.models.taskinstance import SimpleTaskInstance, TaskInstance from airflow.stats import Stats from airflow.ti_deps.dependencies_states import EXECUTION_STATES @@ -1338,7 +1338,7 @@ def _create_dag_runs(self, dag_models: Collection[DagModel], session: Session) - self.log.error("DAG '%s' not found in serialized_dag table", dag_model.dag_id) continue - dag_hash = self.dagbag.dags_hash.get(dag.dag_id) + latest_dag_version = DagVersion.get_latest_version(dag.dag_id, session=session) data_interval = dag.get_next_data_interval(dag_model) # Explicitly check if the DagRun already exists. This is an edge case @@ -1358,7 +1358,7 @@ def _create_dag_runs(self, dag_models: Collection[DagModel], session: Session) - data_interval=data_interval, external_trigger=False, session=session, - dag_hash=dag_hash, + dag_version=latest_dag_version, creating_job_id=self.job.id, triggered_by=DagRunTriggeredByType.TIMETABLE, ) @@ -1417,7 +1417,7 @@ def _create_dag_runs_asset_triggered( ) continue - dag_hash = self.dagbag.dags_hash.get(dag.dag_id) + latest_dag_version = DagVersion.get_latest_version(dag.dag_id, session=session) # Explicitly check if the DagRun already exists. This is an edge case # where a Dag Run is created but `DagModel.next_dagrun` and `DagModel.next_dagrun_create_after` @@ -1472,7 +1472,7 @@ def _create_dag_runs_asset_triggered( state=DagRunState.QUEUED, external_trigger=False, session=session, - dag_hash=dag_hash, + dag_version=latest_dag_version, creating_job_id=self.job.id, triggered_by=DagRunTriggeredByType.ASSET, ) @@ -1750,18 +1750,20 @@ def _verify_integrity_if_dag_changed(self, dag_run: DagRun, session: Session) -> Return True if we determine that DAG still exists. """ - latest_version = SerializedDagModel.get_latest_version_hash(dag_run.dag_id, session=session) - if dag_run.dag_hash == latest_version: + latest_dag_version = DagVersion.get_latest_version(dag_run.dag_id, session=session) + if TYPE_CHECKING: + assert latest_dag_version + if dag_run.dag_version_id == latest_dag_version.id: self.log.debug("DAG %s not changed structure, skipping dagrun.verify_integrity", dag_run.dag_id) return True - dag_run.dag_hash = latest_version - # Refresh the DAG dag_run.dag = self.dagbag.get_dag(dag_id=dag_run.dag_id, session=session) if not dag_run.dag: return False + dag_run.dag_version = latest_dag_version + # Verify integrity also takes care of session.flush dag_run.verify_integrity(session=session) return True @@ -2041,7 +2043,6 @@ def _cleanup_stale_dags(self, session: Session = NEW_SESSION) -> None: In case one of DagProcessors is stopped (in case there are multiple of them for different dag folders), its dags are never marked as inactive. - Also remove dags from SerializedDag table. Executed on schedule only if [scheduler]standalone_dag_processor is True. 
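(Illustrative aside, not part of the patch: the scheduler hunk above swaps the old dag_hash comparison for a DagVersion lookup. A minimal sketch of that check, assuming an open SQLAlchemy session and an existing DagRun row, could look like this.)

    from airflow.models.dag_version import DagVersion

    def dag_run_is_current(dag_run, session) -> bool:
        # Fetch the newest DagVersion row for this DAG (rows are ordered by created_at).
        latest = DagVersion.get_latest_version(dag_run.dag_id, session=session)
        # A run created from the latest version needs no re-verification of its
        # task instances; otherwise the scheduler re-runs verify_integrity().
        return latest is not None and dag_run.dag_version_id == latest.id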
""" self.log.debug("Checking dags not parsed within last %s seconds.", self._dag_stale_not_seen_duration) @@ -2056,7 +2057,6 @@ def _cleanup_stale_dags(self, session: Session = NEW_SESSION) -> None: self.log.info("Found (%d) stales dags not parsed after %s.", len(stale_dags), limit_lpt) for dag in stale_dags: dag.is_active = False - SerializedDagModel.remove_dag(dag_id=dag.dag_id, session=session) session.flush() @provide_session diff --git a/airflow/migrations/versions/0047_3_0_0_add_dag_versioning.py b/airflow/migrations/versions/0047_3_0_0_add_dag_versioning.py new file mode 100644 index 000000000000..12ffc174c750 --- /dev/null +++ b/airflow/migrations/versions/0047_3_0_0_add_dag_versioning.py @@ -0,0 +1,151 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +add dag versioning. + +Revision ID: 2b47dc6bc8df +Revises: d03e4a635aa3 +Create Date: 2024-10-09 05:44:04.670984 + +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op +from sqlalchemy_utils import UUIDType + +from airflow.migrations.db_types import StringID +from airflow.models.base import naming_convention +from airflow.utils import timezone +from airflow.utils.sqlalchemy import UtcDateTime + +# revision identifiers, used by Alembic. 
+revision = "2b47dc6bc8df" +down_revision = "d03e4a635aa3" +branch_labels = None +depends_on = None +airflow_version = "3.0.0" + + +def upgrade(): + """Apply add dag versioning.""" + op.create_table( + "dag_version", + sa.Column("id", UUIDType(binary=False), nullable=False), + sa.Column("version_number", sa.Integer(), nullable=False), + sa.Column("version_name", StringID()), + sa.Column("dag_id", StringID(), nullable=False), + sa.Column("created_at", UtcDateTime(), nullable=False, default=timezone.utcnow), + sa.ForeignKeyConstraint( + ("dag_id",), ["dag.dag_id"], name=op.f("dag_version_dag_id_fkey"), ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("id", name=op.f("dag_version_pkey")), + sa.UniqueConstraint("dag_id", "version_number", name="dag_id_v_name_v_number_unique_constraint"), + ) + with op.batch_alter_table("dag_code", recreate="always", naming_convention=naming_convention) as batch_op: + batch_op.drop_constraint("dag_code_pkey", type_="primary") + batch_op.add_column( + sa.Column("id", UUIDType(binary=False), primary_key=True), insert_before="fileloc_hash" + ) + batch_op.create_primary_key("dag_code_pkey", ["id"]) + batch_op.add_column(sa.Column("dag_version_id", UUIDType(binary=False), nullable=False)) + batch_op.create_foreign_key( + batch_op.f("dag_code_dag_version_id_fkey"), + "dag_version", + ["dag_version_id"], + ["id"], + ondelete="CASCADE", + ) + batch_op.create_unique_constraint("dag_code_dag_version_id_uq", ["dag_version_id"]) + + with op.batch_alter_table( + "serialized_dag", recreate="always", naming_convention=naming_convention + ) as batch_op: + batch_op.drop_constraint("serialized_dag_pkey", type_="primary") + batch_op.add_column(sa.Column("id", UUIDType(binary=False), primary_key=True)) + batch_op.drop_index("idx_fileloc_hash") + batch_op.drop_column("fileloc_hash") + batch_op.drop_column("fileloc") + batch_op.create_primary_key("serialized_dag_pkey", ["id"]) + batch_op.add_column(sa.Column("dag_version_id", UUIDType(binary=False), nullable=False)) + batch_op.create_foreign_key( + batch_op.f("serialized_dag_dag_version_id_fkey"), + "dag_version", + ["dag_version_id"], + ["id"], + ondelete="CASCADE", + ) + batch_op.create_unique_constraint("serialized_dag_dag_version_id_uq", ["dag_version_id"]) + + with op.batch_alter_table("task_instance", schema=None) as batch_op: + batch_op.add_column(sa.Column("dag_version_id", UUIDType(binary=False))) + batch_op.create_foreign_key( + batch_op.f("task_instance_dag_version_id_fkey"), + "dag_version", + ["dag_version_id"], + ["id"], + ondelete="CASCADE", + ) + + with op.batch_alter_table("task_instance_history", schema=None) as batch_op: + batch_op.add_column(sa.Column("dag_version_id", UUIDType(binary=False))) + + with op.batch_alter_table("dag_run", schema=None) as batch_op: + batch_op.add_column(sa.Column("dag_version_id", UUIDType(binary=False))) + batch_op.create_foreign_key( + batch_op.f("dag_run_dag_version_id_fkey"), + "dag_version", + ["dag_version_id"], + ["id"], + ondelete="CASCADE", + ) + batch_op.drop_column("dag_hash") + + +def downgrade(): + """Unapply add dag versioning.""" + with op.batch_alter_table("task_instance_history", schema=None) as batch_op: + batch_op.drop_column("dag_version_id") + + with op.batch_alter_table("task_instance", schema=None) as batch_op: + batch_op.drop_constraint(batch_op.f("task_instance_dag_version_id_fkey"), type_="foreignkey") + batch_op.drop_column("dag_version_id") + + with op.batch_alter_table("dag_code", schema=None) as batch_op: + batch_op.drop_column("id") + 
batch_op.drop_constraint(batch_op.f("dag_code_dag_version_id_fkey"), type_="foreignkey") + batch_op.drop_column("dag_version_id") + batch_op.create_primary_key("dag_code_pkey", ["fileloc_hash"]) + + with op.batch_alter_table("serialized_dag", schema=None, naming_convention=naming_convention) as batch_op: + batch_op.drop_column("id") + batch_op.add_column(sa.Column("fileloc", sa.String(length=2000), autoincrement=False, nullable=False)) + batch_op.add_column(sa.Column("fileloc_hash", sa.BIGINT(), autoincrement=False, nullable=False)) + batch_op.create_index("idx_fileloc_hash", ["fileloc_hash"], unique=False) + batch_op.create_primary_key("serialized_dag_pkey", ["dag_id"]) + batch_op.drop_constraint(batch_op.f("serialized_dag_dag_version_id_fkey"), type_="foreignkey") + batch_op.drop_column("dag_version_id") + + with op.batch_alter_table("dag_run", schema=None) as batch_op: + batch_op.add_column(sa.Column("dag_hash", sa.String(length=32), autoincrement=False, nullable=True)) + batch_op.drop_constraint(batch_op.f("dag_run_dag_version_id_fkey"), type_="foreignkey") + batch_op.drop_column("dag_version_id") + + op.drop_table("dag_version") diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index 1ab4e5584c97..6d8803410532 100644 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -57,6 +57,7 @@ def import_all_models(): import airflow.models.asset import airflow.models.backfill + import airflow.models.dag_version import airflow.models.dagwarning import airflow.models.errors import airflow.models.serialized_dag diff --git a/airflow/models/backfill.py b/airflow/models/backfill.py index 648b35c5bdeb..11d677542fc0 100644 --- a/airflow/models/backfill.py +++ b/airflow/models/backfill.py @@ -43,6 +43,7 @@ from airflow.api_connexion.exceptions import NotFound from airflow.exceptions import AirflowException from airflow.models.base import Base, StringID +from airflow.models.dag_version import DagVersion from airflow.settings import json from airflow.utils import timezone from airflow.utils.session import create_session @@ -200,7 +201,7 @@ def _create_backfill_dag_run( ) ) return - + dag_version = DagVersion.get_latest_version(dag.dag_id, session=session) dr = dag.create_dagrun( triggered_by=DagRunTriggeredByType.BACKFILL, execution_date=info.logical_date, @@ -213,6 +214,7 @@ def _create_backfill_dag_run( creating_job_id=None, session=session, backfill_id=backfill_id, + dag_version=dag_version, ) session.add( BackfillDagRun( @@ -253,7 +255,7 @@ def _create_backfill( from airflow.models.serialized_dag import SerializedDagModel with create_session() as session: - serdag = session.get(SerializedDagModel, dag_id) + serdag = session.scalar(SerializedDagModel.latest_item_select_object(dag_id)) if not serdag: raise NotFound(f"Could not find dag {dag_id}") # todo: if dag has no schedule, raise diff --git a/airflow/models/dag.py b/airflow/models/dag.py index 337fc5c8163e..e6a67c6ad7e5 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -85,7 +85,7 @@ ) from airflow.models.base import Base, StringID from airflow.models.baseoperator import BaseOperator -from airflow.models.dagcode import DagCode +from airflow.models.dag_version import DagVersion from airflow.models.dagrun import RUN_ID_REGEX, DagRun from airflow.models.taskinstance import ( Context, @@ -257,7 +257,7 @@ def _create_orm_dagrun( conf, state, run_type, - dag_hash, + dag_version, creating_job_id, data_interval, backfill_id, @@ -273,7 +273,7 @@ def _create_orm_dagrun( conf=conf, state=state, 
run_type=run_type, - dag_hash=dag_hash, + dag_version=dag_version, creating_job_id=creating_job_id, data_interval=data_interval, triggered_by=triggered_by, @@ -424,6 +424,7 @@ class DAG(TaskSDKDag, LoggingMixin): **Warning**: A fail stop dag can only have tasks with the default trigger rule ("all_success"). An exception will be thrown if any task in a fail stop dag has a non default trigger rule. :param dag_display_name: The display name of the DAG which appears on the UI. + :param version_name: The version name to use in storing the dag to the DB. """ partial: bool = False @@ -1708,7 +1709,7 @@ def create_dagrun( conf: dict | None = None, run_type: DagRunType | None = None, session: Session = NEW_SESSION, - dag_hash: str | None = None, + dag_version: DagVersion | None = None, creating_job_id: int | None = None, data_interval: tuple[datetime, datetime] | None = None, backfill_id: int | None = None, @@ -1728,7 +1729,7 @@ def create_dagrun( :param conf: Dict containing configuration/parameters to pass to the DAG :param creating_job_id: id of the job creating this DagRun :param session: database session - :param dag_hash: Hash of Serialized DAG + :param dag_version: The DagVersion object for this run :param data_interval: Data interval of the DagRun :param backfill_id: id of the backfill run if one exists """ @@ -1800,7 +1801,7 @@ def create_dagrun( conf=conf, state=state, run_type=run_type, - dag_hash=dag_hash, + dag_version=dag_version, creating_job_id=creating_job_id, backfill_id=backfill_id, data_interval=data_interval, @@ -1833,7 +1834,6 @@ def bulk_write_to_db( orm_dags = dag_op.add_dags(session=session) dag_op.update_dags(orm_dags, processor_subdir=processor_subdir, session=session) - DagCode.bulk_sync_to_db((dag.fileloc for dag in dags), session=session) asset_op = AssetModelOperation.collect(dag_op.dags) @@ -2069,6 +2069,9 @@ class DagModel(Base): NUM_DAGS_PER_DAGRUN_QUERY = airflow_conf.getint( "scheduler", "max_dagruns_to_create_per_loop", fallback=10 ) + dag_versions = relationship( + "DagVersion", back_populates="dag_model", cascade="all, delete, delete-orphan" + ) def __init__(self, **kwargs): super().__init__(**kwargs) @@ -2275,9 +2278,10 @@ def dag_ready(dag_id: str, cond: BaseAsset, statuses: dict) -> bool | None: dag_statuses = {} for dag_id, records in by_dag.items(): dag_statuses[dag_id] = {x.asset.uri: True for x in records} - ser_dags = session.scalars( - select(SerializedDagModel).where(SerializedDagModel.dag_id.in_(dag_statuses.keys())) - ).all() + ser_dags = SerializedDagModel.get_latest_serialized_dags( + dag_ids=list(dag_statuses.keys()), session=session + ) + for ser_dag in ser_dags: dag_id = ser_dag.dag_id statuses = dag_statuses[dag_id] @@ -2452,6 +2456,7 @@ def _get_or_create_dagrun( if dr: session.delete(dr) session.commit() + dag_version = DagVersion.get_latest_version(dag.dag_id, session=session) dr = dag.create_dagrun( state=DagRunState.RUNNING, execution_date=execution_date, @@ -2461,6 +2466,7 @@ def _get_or_create_dagrun( conf=conf, data_interval=data_interval, triggered_by=triggered_by, + dag_version=dag_version, ) log.info("created dagrun %s", dr) return dr diff --git a/airflow/models/dag_version.py b/airflow/models/dag_version.py new file mode 100644 index 000000000000..92511f93f5f6 --- /dev/null +++ b/airflow/models/dag_version.py @@ -0,0 +1,167 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +import logging +from typing import TYPE_CHECKING + +import uuid6 +from sqlalchemy import Column, ForeignKey, Integer, UniqueConstraint, select +from sqlalchemy.orm import relationship +from sqlalchemy_utils import UUIDType + +from airflow.models.base import Base, StringID +from airflow.utils import timezone +from airflow.utils.session import NEW_SESSION, provide_session +from airflow.utils.sqlalchemy import UtcDateTime, with_row_locks + +if TYPE_CHECKING: + from sqlalchemy.orm import Session + from sqlalchemy.sql import Select + +log = logging.getLogger(__name__) + + +class DagVersion(Base): + """Model to track the versions of DAGs in the database.""" + + __tablename__ = "dag_version" + id = Column(UUIDType(binary=False), primary_key=True, default=uuid6.uuid7) + version_number = Column(Integer, nullable=False, default=1) + version_name = Column(StringID()) + dag_id = Column(StringID(), ForeignKey("dag.dag_id", ondelete="CASCADE"), nullable=False) + dag_model = relationship("DagModel", back_populates="dag_versions") + dag_code = relationship( + "DagCode", + back_populates="dag_version", + uselist=False, + cascade="all, delete, delete-orphan", + cascade_backrefs=False, + ) + serialized_dag = relationship( + "SerializedDagModel", + back_populates="dag_version", + uselist=False, + cascade="all, delete, delete-orphan", + cascade_backrefs=False, + ) + dag_runs = relationship("DagRun", back_populates="dag_version", cascade="all, delete, delete-orphan") + task_instances = relationship("TaskInstance", back_populates="dag_version") + created_at = Column(UtcDateTime, default=timezone.utcnow) + + __table_args__ = ( + UniqueConstraint("dag_id", "version_number", name="dag_id_v_name_v_number_unique_constraint"), + ) + + def __repr__(self): + """Represent the object as a string.""" + return f"" + + @classmethod + @provide_session + def write_dag( + cls, + *, + dag_id: str, + version_name: str | None = None, + version_number: int = 1, + session: Session = NEW_SESSION, + ) -> DagVersion: + """ + Write a new DagVersion into database. + + Checks if a version of the DAG exists and increments the version number if it does. + + :param dag_id: The DAG ID. + :param version_name: The version name. + :param version_number: The version number. + :param session: The database session. + :return: The DagVersion object. 
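(A small usage sketch may help here; it is illustrative only and assumes an initialized Airflow metadata database that already contains a DagModel row for the hypothetical dag_id "example_dag".)

    from airflow.models.dag_version import DagVersion
    from airflow.utils.session import create_session

    with create_session() as session:
        # The first write creates version_number=1; each later write for the
        # same dag_id takes a row lock and increments the number.
        v1 = DagVersion.write_dag(dag_id="example_dag", session=session)
        v2 = DagVersion.write_dag(dag_id="example_dag", session=session)
        assert v2.version_number == v1.version_number + 1
        # Readers (scheduler, REST API, UI trigger) fetch the newest row back:
        latest = DagVersion.get_latest_version("example_dag", session=session)
        print(latest.version)  # "2", or "<version_name>-2" when a name was set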
+ """ + existing_dag_version = session.scalar( + with_row_locks(cls._latest_version_select(dag_id), of=DagVersion, session=session, nowait=True) + ) + if existing_dag_version: + version_number = existing_dag_version.version_number + 1 + + dag_version = DagVersion( + dag_id=dag_id, + version_number=version_number, + version_name=version_name, + ) + log.debug("Writing DagVersion %s to the DB", dag_version) + session.add(dag_version) + # Flush is necessary here due to the unique constraint and other linked tables + session.flush() + log.debug("DagVersion %s written to the DB", dag_version) + return dag_version + + @classmethod + def _latest_version_select(cls, dag_id: str) -> Select: + """ + Get the select object to get the latest version of the DAG. + + :param dag_id: The DAG ID. + :return: The select object. + """ + return select(cls).where(cls.dag_id == dag_id).order_by(cls.created_at.desc()).limit(1) + + @classmethod + @provide_session + def get_latest_version(cls, dag_id: str, *, session: Session = NEW_SESSION) -> DagVersion | None: + """ + Get the latest version of the DAG. + + :param dag_id: The DAG ID. + :param session: The database session. + :return: The latest version of the DAG or None if not found. + """ + return session.scalar(cls._latest_version_select(dag_id)) + + @classmethod + @provide_session + def get_version( + cls, + dag_id: str, + version_number: int = 1, + *, + session: Session = NEW_SESSION, + ) -> DagVersion | None: + """ + Get the version of the DAG. + + :param dag_id: The DAG ID. + :param version_number: The version number. + :param session: The database session. + :return: The version of the DAG or None if not found. + """ + version_select_obj = ( + select(cls) + .where(cls.dag_id == dag_id, cls.version_number == version_number) + .order_by(cls.version_number.desc()) + .limit(1) + ) + return session.scalar(version_select_obj) + + @property + def version(self) -> str: + """A human-friendly representation of the version.""" + name = f"{self.version_number}" + if self.version_name: + name = f"{self.version_name}-{self.version_number}" + return name diff --git a/airflow/models/dagbag.py b/airflow/models/dagbag.py index c9ad8edaa401..5b57c7983ea1 100644 --- a/airflow/models/dagbag.py +++ b/airflow/models/dagbag.py @@ -650,13 +650,12 @@ def _serialize_dag_capturing_errors(dag, session, processor_subdir): ) log.debug("Calling the DAG.bulk_sync_to_db method") try: + DAG.bulk_write_to_db(dags.values(), processor_subdir=processor_subdir, session=session) # Write Serialized DAGs to DB, capturing errors for dag in dags.values(): serialize_errors.extend( _serialize_dag_capturing_errors(dag, session, processor_subdir) ) - - DAG.bulk_write_to_db(dags.values(), processor_subdir=processor_subdir, session=session) except OperationalError: session.rollback() raise diff --git a/airflow/models/dagcode.py b/airflow/models/dagcode.py index 321f819999bf..c78f6cafaa6f 100644 --- a/airflow/models/dagcode.py +++ b/airflow/models/dagcode.py @@ -17,26 +17,30 @@ from __future__ import annotations import logging -import os import struct -from datetime import datetime -from typing import TYPE_CHECKING, Collection, Iterable +from typing import TYPE_CHECKING, Collection -from sqlalchemy import BigInteger, Column, String, Text, delete, select +import uuid6 +from sqlalchemy import BigInteger, Column, ForeignKey, String, Text, delete, select from sqlalchemy.dialects.mysql import MEDIUMTEXT +from sqlalchemy.orm import relationship from sqlalchemy.sql.expression import literal +from sqlalchemy_utils 
import UUIDType from airflow.api_internal.internal_api_call import internal_api_call -from airflow.exceptions import AirflowException, DagCodeNotFound +from airflow.configuration import conf +from airflow.exceptions import DagCodeNotFound from airflow.models.base import Base from airflow.utils import timezone -from airflow.utils.file import correct_maybe_zipped, open_maybe_zipped +from airflow.utils.file import open_maybe_zipped from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.sqlalchemy import UtcDateTime if TYPE_CHECKING: from sqlalchemy.orm import Session + from airflow.models.dag_version import DagVersion + log = logging.getLogger(__name__) @@ -50,84 +54,38 @@ class DagCode(Base): """ __tablename__ = "dag_code" - - fileloc_hash = Column(BigInteger, nullable=False, primary_key=True, autoincrement=False) + id = Column(UUIDType(binary=False), primary_key=True, default=uuid6.uuid7) + fileloc_hash = Column(BigInteger, nullable=False) fileloc = Column(String(2000), nullable=False) # The max length of fileloc exceeds the limit of indexing. last_updated = Column(UtcDateTime, nullable=False) source_code = Column(Text().with_variant(MEDIUMTEXT(), "mysql"), nullable=False) + dag_version_id = Column( + UUIDType(binary=False), ForeignKey("dag_version.id", ondelete="CASCADE"), nullable=False, unique=True + ) + dag_version = relationship("DagVersion", back_populates="dag_code", uselist=False) - def __init__(self, full_filepath: str, source_code: str | None = None): + def __init__(self, dag_version, full_filepath: str, source_code: str | None = None): + self.dag_version = dag_version self.fileloc = full_filepath self.fileloc_hash = DagCode.dag_fileloc_hash(self.fileloc) self.last_updated = timezone.utcnow() self.source_code = source_code or DagCode.code(self.fileloc) - @provide_session - def sync_to_db(self, session: Session = NEW_SESSION) -> None: - """ - Write code into database. - - :param session: ORM Session - """ - self.bulk_sync_to_db([self.fileloc], session) - @classmethod @provide_session - def bulk_sync_to_db(cls, filelocs: Iterable[str], session: Session = NEW_SESSION) -> None: + def write_dag(cls, dag_version: DagVersion, fileloc: str, session: Session = NEW_SESSION) -> DagCode: """ - Write code in bulk into database. + Write code into database. 
- :param filelocs: file paths of DAGs to sync + :param fileloc: file path of DAG to sync :param session: ORM Session """ - filelocs = set(filelocs) - filelocs_to_hashes = {fileloc: DagCode.dag_fileloc_hash(fileloc) for fileloc in filelocs} - existing_orm_dag_codes = session.scalars( - select(DagCode) - .filter(DagCode.fileloc_hash.in_(filelocs_to_hashes.values())) - .with_for_update(of=DagCode) - ).all() - - if existing_orm_dag_codes: - existing_orm_dag_codes_map = { - orm_dag_code.fileloc: orm_dag_code for orm_dag_code in existing_orm_dag_codes - } - else: - existing_orm_dag_codes_map = {} - - existing_orm_dag_codes_by_fileloc_hashes = {orm.fileloc_hash: orm for orm in existing_orm_dag_codes} - existing_orm_filelocs = {orm.fileloc for orm in existing_orm_dag_codes_by_fileloc_hashes.values()} - if not existing_orm_filelocs.issubset(filelocs): - conflicting_filelocs = existing_orm_filelocs.difference(filelocs) - hashes_to_filelocs = {DagCode.dag_fileloc_hash(fileloc): fileloc for fileloc in filelocs} - message = "" - for fileloc in conflicting_filelocs: - filename = hashes_to_filelocs[DagCode.dag_fileloc_hash(fileloc)] - message += ( - f"Filename '{filename}' causes a hash collision in the " - f"database with '{fileloc}'. Please rename the file." - ) - raise AirflowException(message) - - existing_filelocs = {dag_code.fileloc for dag_code in existing_orm_dag_codes} - missing_filelocs = filelocs.difference(existing_filelocs) - - for fileloc in missing_filelocs: - orm_dag_code = DagCode(fileloc, cls._get_code_from_file(fileloc)) - session.add(orm_dag_code) - - for fileloc in existing_filelocs: - current_version = existing_orm_dag_codes_by_fileloc_hashes[filelocs_to_hashes[fileloc]] - file_mod_time = datetime.fromtimestamp( - os.path.getmtime(correct_maybe_zipped(fileloc)), tz=timezone.utc - ) - - if file_mod_time > current_version.last_updated: - orm_dag_code = existing_orm_dag_codes_map[fileloc] - orm_dag_code.last_updated = file_mod_time - orm_dag_code.source_code = cls._get_code_from_file(orm_dag_code.fileloc) - session.merge(orm_dag_code) + log.debug("Writing DAG file %s into DagCode table", fileloc) + dag_code = DagCode(dag_version, fileloc, cls._get_code_from_file(fileloc)) + session.add(dag_code) + log.debug("DAG file %s written into DagCode table", fileloc) + return dag_code @classmethod @internal_api_call @@ -170,7 +128,9 @@ def has_dag(cls, fileloc: str, session: Session = NEW_SESSION) -> bool: """ fileloc_hash = cls.dag_fileloc_hash(fileloc) return ( - session.scalars(select(literal(True)).where(cls.fileloc_hash == fileloc_hash)).one_or_none() + session.scalars( + select(literal(True)).where(cls.fileloc_hash == fileloc_hash).limit(1) + ).one_or_none() is not None ) @@ -196,14 +156,25 @@ def code(cls, fileloc, session: Session = NEW_SESSION) -> str: @staticmethod def _get_code_from_file(fileloc): - with open_maybe_zipped(fileloc, "r") as f: - code = f.read() - return code + try: + with open_maybe_zipped(fileloc, "r") as f: + code = f.read() + return code + except FileNotFoundError: + test_mode = conf.get("core", "unit_test_mode") + if test_mode: + return "source_code" + raise @classmethod @provide_session def _get_code_from_db(cls, fileloc, session: Session = NEW_SESSION) -> str: - dag_code = session.scalar(select(cls).where(cls.fileloc_hash == cls.dag_fileloc_hash(fileloc))) + dag_code = session.scalar( + select(cls) + .where(cls.fileloc_hash == cls.dag_fileloc_hash(fileloc)) + .order_by(cls.last_updated.desc()) + .limit(1) + ) if not dag_code: raise DagCodeNotFound() else: diff 
--git a/airflow/models/dagrun.py b/airflow/models/dagrun.py index 5de0466a6be0..635cd73ccd8d 100644 --- a/airflow/models/dagrun.py +++ b/airflow/models/dagrun.py @@ -48,6 +48,7 @@ from sqlalchemy.orm import declared_attr, joinedload, relationship, synonym, validates from sqlalchemy.sql.expression import case, false, select, true from sqlalchemy.sql.functions import coalesce +from sqlalchemy_utils import UUIDType from airflow import settings from airflow.api_internal.internal_api_call import internal_api_call @@ -59,6 +60,7 @@ from airflow.models.abstractoperator import NotMapped from airflow.models.backfill import Backfill from airflow.models.base import Base, StringID +from airflow.models.dag_version import DagVersion from airflow.models.expandinput import NotFullyPopulated from airflow.models.taskinstance import TaskInstance as TI from airflow.models.tasklog import LogTemplate @@ -144,7 +146,6 @@ class DagRun(Base, LoggingMixin): data_interval_end = Column(UtcDateTime) # When a scheduler last attempted to schedule TIs for this DagRun last_scheduling_decision = Column(UtcDateTime) - dag_hash = Column(String(32)) # Foreign key to LogTemplate. DagRun rows created prior to this column's # existence have this set to NULL. Later rows automatically populate this on # insert to point to the latest LogTemplate entry. @@ -164,6 +165,8 @@ class DagRun(Base, LoggingMixin): It's possible this could change if e.g. the dag run is cleared to be rerun, or perhaps re-backfilled. """ + dag_version_id = Column(UUIDType(binary=False), ForeignKey("dag_version.id", ondelete="CASCADE")) + dag_version = relationship("DagVersion", back_populates="dag_runs") # Remove this `if` after upgrading Sphinx-AutoAPI if not TYPE_CHECKING and "BUILDING_AIRFLOW_DOCS" in os.environ: @@ -231,11 +234,11 @@ def __init__( conf: Any | None = None, state: DagRunState | None = None, run_type: str | None = None, - dag_hash: str | None = None, creating_job_id: int | None = None, data_interval: tuple[datetime, datetime] | None = None, triggered_by: DagRunTriggeredByType | None = None, backfill_id: int | None = None, + dag_version: DagVersion | None = None, ): if data_interval is None: # Legacy: Only happen for runs created prior to Airflow 2.2. 
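(The DagRun changes above read more easily next to a call-site sketch. This mirrors the call sites updated earlier in this patch, such as trigger_dag and the DAG run REST endpoint, and is not itself part of the diff; `dag`, `run_id`, `logical_date`, and `data_interval` are assumed to already be in scope, and import paths follow the existing Airflow layout.)

    from airflow.models.dag_version import DagVersion
    from airflow.utils.state import DagRunState
    from airflow.utils.types import DagRunTriggeredByType, DagRunType

    # Pin the new run to the latest known version of the DAG.
    dag_version = DagVersion.get_latest_version(dag.dag_id)
    dag_run = dag.create_dagrun(
        run_type=DagRunType.MANUAL,
        run_id=run_id,
        execution_date=logical_date,
        data_interval=data_interval,
        state=DagRunState.QUEUED,
        external_trigger=True,
        dag_version=dag_version,  # replaces the former dag_hash argument
        triggered_by=DagRunTriggeredByType.UI,
    )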
@@ -256,11 +259,11 @@ def __init__( else: self.queued_at = queued_at self.run_type = run_type - self.dag_hash = dag_hash self.creating_job_id = creating_job_id self.backfill_id = backfill_id self.clear_number = 0 self.triggered_by = triggered_by + self.dag_version = dag_version super().__init__() def __repr__(self): @@ -994,8 +997,9 @@ def recalculate(self) -> _UnfinishedStates: "DagRun Finished: dag_id=%s, execution_date=%s, run_id=%s, " "run_start_date=%s, run_end_date=%s, run_duration=%s, " "state=%s, external_trigger=%s, run_type=%s, " - "data_interval_start=%s, data_interval_end=%s, dag_hash=%s" + "data_interval_start=%s, data_interval_end=%s, dag_version_name=%s" ) + dagv = session.scalar(select(DagVersion).where(DagVersion.id == self.dag_version_id)) self.log.info( msg, self.dag_id, @@ -1013,7 +1017,7 @@ def recalculate(self) -> _UnfinishedStates: self.run_type, self.data_interval_start, self.data_interval_end, - self.dag_hash, + dagv.version if dagv else None, ) with Trace.start_span_from_dagrun(dagrun=self) as span: @@ -1037,7 +1041,7 @@ def recalculate(self) -> _UnfinishedStates: "run_type": str(self.run_type), "data_interval_start": str(self.data_interval_start), "data_interval_end": str(self.data_interval_end), - "dag_hash": str(self.dag_hash), + "dag_version": str(dagv.version if dagv else None), "conf": str(self.conf), } if span.is_recording(): @@ -1454,7 +1458,9 @@ def _get_task_creator( def create_ti_mapping(task: Operator, indexes: Iterable[int]) -> Iterator[dict[str, Any]]: created_counts[task.task_type] += 1 for map_index in indexes: - yield TI.insert_mapping(self.run_id, task, map_index=map_index) + yield TI.insert_mapping( + self.run_id, task, map_index=map_index, dag_version_id=self.dag_version_id + ) creator = create_ti_mapping @@ -1462,7 +1468,7 @@ def create_ti_mapping(task: Operator, indexes: Iterable[int]) -> Iterator[dict[s def create_ti(task: Operator, indexes: Iterable[int]) -> Iterator[TI]: for map_index in indexes: - ti = TI(task, run_id=self.run_id, map_index=map_index) + ti = TI(task, run_id=self.run_id, map_index=map_index, dag_version_id=self.dag_version_id) ti_mutation_hook(ti) created_counts[ti.operator] += 1 yield ti diff --git a/airflow/models/serialized_dag.py b/airflow/models/serialized_dag.py index 32be31d721e3..0d5667cd48fc 100644 --- a/airflow/models/serialized_dag.py +++ b/airflow/models/serialized_dag.py @@ -25,14 +25,17 @@ from typing import TYPE_CHECKING, Any, Collection import sqlalchemy_jsonfield -from sqlalchemy import BigInteger, Column, Index, LargeBinary, String, and_, exc, or_, select +import uuid6 +from sqlalchemy import Column, ForeignKey, LargeBinary, String, exc, or_, select from sqlalchemy.orm import backref, foreign, relationship from sqlalchemy.sql.expression import func, literal +from sqlalchemy_utils import UUIDType from airflow.api_internal.internal_api_call import internal_api_call from airflow.exceptions import TaskNotFound from airflow.models.base import ID_LEN, Base from airflow.models.dag import DagModel +from airflow.models.dag_version import DagVersion from airflow.models.dagcode import DagCode from airflow.models.dagrun import DagRun from airflow.serialization.dag_dependency import DagDependency @@ -76,19 +79,14 @@ class SerializedDagModel(Base): """ __tablename__ = "serialized_dag" - - dag_id = Column(String(ID_LEN), primary_key=True) - fileloc = Column(String(2000), nullable=False) - # The max length of fileloc exceeds the limit of indexing. 
- fileloc_hash = Column(BigInteger(), nullable=False) + id = Column(UUIDType(binary=False), primary_key=True, default=uuid6.uuid7) + dag_id = Column(String(ID_LEN), nullable=False) _data = Column("data", sqlalchemy_jsonfield.JSONField(json=json), nullable=True) _data_compressed = Column("data_compressed", LargeBinary, nullable=True) last_updated = Column(UtcDateTime, nullable=False) dag_hash = Column(String(32), nullable=False) processor_subdir = Column(String(2000), nullable=True) - __table_args__ = (Index("idx_fileloc_hash", fileloc_hash, unique=False),) - dag_runs = relationship( DagRun, primaryjoin=dag_id == foreign(DagRun.dag_id), # type: ignore @@ -103,13 +101,15 @@ class SerializedDagModel(Base): innerjoin=True, backref=backref("serialized_dag", uselist=False, innerjoin=True), ) + dag_version_id = Column( + UUIDType(binary=False), ForeignKey("dag_version.id", ondelete="CASCADE"), nullable=False, unique=True + ) + dag_version = relationship("DagVersion", back_populates="serialized_dag") load_op_links = True def __init__(self, dag: DAG, processor_subdir: str | None = None) -> None: self.dag_id = dag.dag_id - self.fileloc = dag.fileloc - self.fileloc_hash = DagCode.dag_fileloc_hash(self.fileloc) self.last_updated = timezone.utcnow() self.processor_subdir = processor_subdir @@ -194,7 +194,9 @@ def write_dag( log.debug("Checking if DAG (%s) changed", dag.dag_id) new_serialized_dag = cls(dag, processor_subdir) serialized_dag_db = session.execute( - select(cls.dag_hash, cls.processor_subdir).where(cls.dag_id == dag.dag_id) + select(cls.dag_hash, cls.processor_subdir) + .where(cls.dag_id == dag.dag_id) + .order_by(cls.last_updated.desc()) ).first() if ( @@ -204,12 +206,52 @@ def write_dag( ): log.debug("Serialized DAG (%s) is unchanged. Skipping writing to DB", dag.dag_id) return False - + dagv = DagVersion.write_dag( + version_name=dag.version_name, + dag_id=dag.dag_id, + session=session, + ) log.debug("Writing Serialized DAG: %s to the DB", dag.dag_id) - session.merge(new_serialized_dag) + new_serialized_dag.dag_version = dagv + session.add(new_serialized_dag) log.debug("DAG: %s written to the DB", dag.dag_id) + + DagCode.write_dag(dagv, dag.fileloc, session=session) return True + @classmethod + def latest_item_select_object(cls, dag_id): + return select(cls).where(cls.dag_id == dag_id).order_by(cls.last_updated.desc()).limit(1) + + @classmethod + @provide_session + def get_latest_serialized_dags( + cls, *, dag_ids: list[str], session: Session = NEW_SESSION + ) -> list[SerializedDagModel]: + """ + Get the latest serialized dags of given DAGs. + + :param dag_ids: The list of DAG IDs. + :param session: The database session. + :return: The latest serialized dag of the DAGs. 
+ """ + # Subquery to get the latest serdag per dag_id + latest_serdag_subquery = ( + session.query(cls.dag_id, func.max(cls.last_updated).label("last_updated")) + .filter(cls.dag_id.in_(dag_ids)) + .group_by(cls.dag_id) + .subquery() + ) + latest_serdags = session.scalars( + select(cls) + .join( + latest_serdag_subquery, + cls.last_updated == latest_serdag_subquery.c.last_updated, + ) + .where(cls.dag_id.in_(dag_ids)) + ).all() + return latest_serdags or [] + @classmethod @provide_session def read_all_dags(cls, session: Session = NEW_SESSION) -> dict[str, SerializedDAG]: @@ -219,7 +261,18 @@ def read_all_dags(cls, session: Session = NEW_SESSION) -> dict[str, SerializedDA :param session: ORM Session :returns: a dict of DAGs read from database """ - serialized_dags = session.scalars(select(cls)) + latest_serialized_dag_subquery = ( + session.query(cls.dag_id, func.max(cls.last_updated).label("max_updated")) + .group_by(cls.dag_id) + .subquery() + ) + serialized_dags = session.scalars( + select(cls).join( + latest_serialized_dag_subquery, + (cls.dag_id == latest_serialized_dag_subquery.c.dag_id) + and (cls.last_updated == latest_serialized_dag_subquery.c.max_updated), + ) + ) dags = {} for row in serialized_dags: @@ -287,22 +340,17 @@ def remove_deleted_dags( :param processor_subdir: dag processor subdir :param session: ORM Session """ - alive_fileloc_hashes = [DagCode.dag_fileloc_hash(fileloc) for fileloc in alive_dag_filelocs] - log.debug( "Deleting Serialized DAGs (for which DAG files are deleted) from %s table ", cls.__tablename__ ) - + # Deleting the DagModel cascade deletes the serialized Dag through the dag version relationship session.execute( - cls.__table__.delete().where( - and_( - cls.fileloc_hash.notin_(alive_fileloc_hashes), - cls.fileloc.notin_(alive_dag_filelocs), - or_( - cls.processor_subdir.is_(None), - cls.processor_subdir == processor_subdir, - ), - ) + DagModel.__table__.delete().where( + DagModel.fileloc.notin_(alive_dag_filelocs), + or_( + DagModel.processor_subdir.is_(None), + DagModel.processor_subdir == processor_subdir, + ), ) ) @@ -334,11 +382,7 @@ def get(cls, dag_id: str, session: Session = NEW_SESSION) -> SerializedDagModel :param dag_id: the DAG to fetch :param session: ORM Session """ - row = session.scalar(select(cls).where(cls.dag_id == dag_id)) - if row: - return row - - return session.scalar(select(cls).where(cls.dag_id == dag_id)) + return session.scalar(cls.latest_item_select_object(dag_id)) @staticmethod @provide_session @@ -373,7 +417,9 @@ def get_last_updated_datetime(cls, dag_id: str, session: Session = NEW_SESSION) :param dag_id: DAG ID :param session: ORM Session """ - return session.scalar(select(cls.last_updated).where(cls.dag_id == dag_id)) + return session.scalar( + select(cls.last_updated).where(cls.dag_id == dag_id).order_by(cls.last_updated.desc()).limit(1) + ) @classmethod @provide_session @@ -395,7 +441,9 @@ def get_latest_version_hash(cls, dag_id: str, session: Session = NEW_SESSION) -> :param session: ORM Session :return: DAG Hash, or None if the DAG is not found """ - return session.scalar(select(cls.dag_hash).where(cls.dag_id == dag_id)) + return session.scalar( + select(cls.dag_hash).where(cls.dag_id == dag_id).order_by(cls.last_updated.desc()).limit(1) + ) @classmethod def get_latest_version_hash_and_updated_datetime( @@ -413,7 +461,10 @@ def get_latest_version_hash_and_updated_datetime( :return: A tuple of DAG Hash and last updated datetime, or None if the DAG is not found """ return session.execute( - select(cls.dag_hash, 
cls.last_updated).where(cls.dag_id == dag_id) + select(cls.dag_hash, cls.last_updated) + .where(cls.dag_id == dag_id) + .order_by(cls.last_updated.desc()) + .limit(1) ).one_or_none() @classmethod @@ -424,14 +475,27 @@ def get_dag_dependencies(cls, session: Session = NEW_SESSION) -> dict[str, list[ :param session: ORM Session """ + latest_sdag_subquery = ( + session.query(cls.dag_id, func.max(cls.last_updated).label("max_updated")) + .group_by(cls.dag_id) + .subquery() + ) if session.bind.dialect.name in ["sqlite", "mysql"]: query = session.execute( - select(cls.dag_id, func.json_extract(cls._data, "$.dag.dag_dependencies")) + select(cls.dag_id, func.json_extract(cls._data, "$.dag.dag_dependencies")).join( + latest_sdag_subquery, + (cls.dag_id == latest_sdag_subquery.c.dag_id) + and (cls.last_updated == latest_sdag_subquery.c.max_updated), + ) ) iterator = ((dag_id, json.loads(deps_data) if deps_data else []) for dag_id, deps_data in query) else: iterator = session.execute( - select(cls.dag_id, func.json_extract_path(cls._data, "dag", "dag_dependencies")) + select(cls.dag_id, func.json_extract_path(cls._data, "dag", "dag_dependencies")).join( + latest_sdag_subquery, + (cls.dag_id == latest_sdag_subquery.c.dag_id) + and (cls.last_updated == latest_sdag_subquery.c.max_updated), + ) ) return {dag_id: [DagDependency(**d) for d in (deps_data or [])] for dag_id, deps_data in iterator} @@ -439,10 +503,9 @@ def get_dag_dependencies(cls, session: Session = NEW_SESSION) -> dict[str, list[ @internal_api_call @provide_session def get_serialized_dag(dag_id: str, task_id: str, session: Session = NEW_SESSION) -> Operator | None: - from airflow.models.serialized_dag import SerializedDagModel - try: - model = session.get(SerializedDagModel, dag_id) + # get the latest version of the DAG + model = session.scalar(SerializedDagModel.latest_item_select_object(dag_id)) if model: return model.dag.get_task(task_id) except (exc.NoResultFound, TaskNotFound): diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index c525a40a14ab..a8a96a25f629 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -45,6 +45,7 @@ Column, DateTime, Float, + ForeignKey, ForeignKeyConstraint, Index, Integer, @@ -69,6 +70,7 @@ from sqlalchemy.orm import lazyload, reconstructor, relationship from sqlalchemy.orm.attributes import NO_VALUE, set_committed_value from sqlalchemy.sql.expression import case, select +from sqlalchemy_utils import UUIDType from airflow import settings from airflow.api_internal.internal_api_call import InternalApiConfig, internal_api_call @@ -821,6 +823,7 @@ def _set_ti_attrs(target, source, include_dag_run=False): target.trigger_id = source.trigger_id target.next_method = source.next_method target.next_kwargs = source.next_kwargs + target.dag_version_id = source.dag_version_id if include_dag_run: target.execution_date = source.execution_date @@ -839,7 +842,7 @@ def _set_ti_attrs(target, source, include_dag_run=False): target.dag_run.data_interval_start = source.dag_run.data_interval_start target.dag_run.data_interval_end = source.dag_run.data_interval_end target.dag_run.last_scheduling_decision = source.dag_run.last_scheduling_decision - target.dag_run.dag_hash = source.dag_run.dag_hash + target.dag_run.dag_version_id = source.dag_run.dag_version_id target.dag_run.updated_at = source.dag_run.updated_at target.dag_run.log_template_id = source.dag_run.log_template_id @@ -1876,8 +1879,10 @@ class TaskInstance(Base, LoggingMixin): next_kwargs = 
Column(MutableDict.as_mutable(ExtendedJSON)) _task_display_property_value = Column("task_display_name", String(2000), nullable=True) + dag_version_id = Column(UUIDType(binary=False), ForeignKey("dag_version.id", ondelete="CASCADE")) + dag_version = relationship("DagVersion", back_populates="task_instances") # If adding new fields here then remember to add them to - # refresh_from_db() or they won't display in the UI correctly + # _set_ti_attrs() or they won't display in the UI correctly __table_args__ = ( Index("ti_dag_state", dag_id, state), @@ -1942,11 +1947,13 @@ def __init__( run_id: str | None = None, state: str | None = None, map_index: int = -1, + dag_version_id: UUIDType | None = None, ): super().__init__() self.dag_id = task.dag_id self.task_id = task.task_id self.map_index = map_index + self.dag_version_id = dag_version_id self.refresh_from_task(task) if TYPE_CHECKING: assert self.task @@ -1978,7 +1985,7 @@ def stats_tags(self) -> dict[str, str]: return _stats_tags(task_instance=self) @staticmethod - def insert_mapping(run_id: str, task: Operator, map_index: int) -> dict[str, Any]: + def insert_mapping(run_id: str, task: Operator, map_index: int, dag_version_id: int) -> dict[str, Any]: """ Insert mapping. @@ -2007,6 +2014,7 @@ def insert_mapping(run_id: str, task: Operator, map_index: int) -> dict[str, Any "custom_operator_name": getattr(task, "custom_operator_name", None), "map_index": map_index, "_task_display_property_value": task.task_display_name, + "dag_version_id": dag_version_id, } @reconstructor diff --git a/airflow/models/taskinstancehistory.py b/airflow/models/taskinstancehistory.py index 8c77daf92579..e587cf083e3b 100644 --- a/airflow/models/taskinstancehistory.py +++ b/airflow/models/taskinstancehistory.py @@ -33,6 +33,7 @@ text, ) from sqlalchemy.ext.mutable import MutableDict +from sqlalchemy_utils import UUIDType from airflow.models.base import Base, StringID from airflow.utils import timezone @@ -91,6 +92,7 @@ class TaskInstanceHistory(Base): next_kwargs = Column(MutableDict.as_mutable(ExtendedJSON)) task_display_name = Column("task_display_name", String(2000), nullable=True) + dag_version_id = Column(UUIDType(binary=False)) def __init__( self, diff --git a/airflow/serialization/pydantic/dag_run.py b/airflow/serialization/pydantic/dag_run.py index fd12ca12c018..a0175e3749d9 100644 --- a/airflow/serialization/pydantic/dag_run.py +++ b/airflow/serialization/pydantic/dag_run.py @@ -52,7 +52,7 @@ class DagRunPydantic(BaseModelPydantic): data_interval_start: Optional[datetime] data_interval_end: Optional[datetime] last_scheduling_decision: Optional[datetime] - dag_hash: Optional[str] + dag_version_id: Optional[int] updated_at: Optional[datetime] dag: Optional[PydanticDag] consumed_asset_events: List[AssetEventPydantic] # noqa: UP006 diff --git a/airflow/serialization/schema.json b/airflow/serialization/schema.json index 32ccd3dfff9c..b26b59339816 100644 --- a/airflow/serialization/schema.json +++ b/airflow/serialization/schema.json @@ -158,6 +158,7 @@ }, "dag_display_name": { "type" : "string"}, "description": { "type" : "string"}, + "version_name": {"type": "string"}, "_concurrency": { "type" : "number"}, "max_active_tasks": { "type" : "number"}, "max_active_runs": { "type" : "number"}, diff --git a/airflow/utils/db.py b/airflow/utils/db.py index d23f54068b59..d5218b6050e6 100644 --- a/airflow/utils/db.py +++ b/airflow/utils/db.py @@ -97,7 +97,7 @@ class MappedClassProtocol(Protocol): "2.9.2": "686269002441", "2.10.0": "22ed7efa9da2", "2.10.3": "5f2621c13b39", - 
"3.0.0": "d03e4a635aa3", + "3.0.0": "2b47dc6bc8df", } diff --git a/airflow/www/views.py b/airflow/www/views.py index e287c027a894..cce2ec8b88c8 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -105,6 +105,7 @@ from airflow.models import Connection, DagModel, DagTag, Log, Trigger, XCom from airflow.models.asset import AssetDagRunQueue, AssetEvent, AssetModel, DagScheduleAssetReference from airflow.models.dag import get_asset_triggered_next_run_info +from airflow.models.dag_version import DagVersion from airflow.models.dagrun import RUN_ID_REGEX, DagRun, DagRunType from airflow.models.errors import ParseImportError from airflow.models.serialized_dag import SerializedDagModel @@ -2201,6 +2202,7 @@ def trigger(self, dag_id: str, session: Session = NEW_SESSION): ) try: + dag_version = DagVersion.get_latest_version(dag.dag_id) dag_run = dag.create_dagrun( run_type=DagRunType.MANUAL, execution_date=execution_date, @@ -2208,7 +2210,7 @@ def trigger(self, dag_id: str, session: Session = NEW_SESSION): state=DagRunState.QUEUED, conf=run_conf, external_trigger=True, - dag_hash=get_airflow_app().dag_bag.dags_hash.get(dag_id), + dag_version=dag_version, run_id=run_id, triggered_by=DagRunTriggeredByType.UI, ) diff --git a/docs/apache-airflow/img/airflow_erd.sha256 b/docs/apache-airflow/img/airflow_erd.sha256 index 572ce439c231..c8963880f842 100644 --- a/docs/apache-airflow/img/airflow_erd.sha256 +++ b/docs/apache-airflow/img/airflow_erd.sha256 @@ -1 +1 @@ -5ec1019b1b0f43b29fc83638c2a13c0bda90b7e4f0ff542aeab401bbfa9a83e4 \ No newline at end of file +f997746cdee45147831f81bcd2d43ec3ca45d7429afa691e385104987ed51d88 \ No newline at end of file diff --git a/docs/apache-airflow/img/airflow_erd.svg b/docs/apache-airflow/img/airflow_erd.svg index ba935dd6c4be..b3caf10cba3e 100644 --- a/docs/apache-airflow/img/airflow_erd.svg +++ b/docs/apache-airflow/img/airflow_erd.svg @@ -4,2271 +4,2353 @@ - - + + %3 - + log - -log - -id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - -dttm - - [TIMESTAMP] - -event - - [VARCHAR(60)] - -execution_date - - [TIMESTAMP] - -extra - - [TEXT] - -map_index - - [INTEGER] - -owner - - [VARCHAR(500)] - -owner_display_name - - [VARCHAR(500)] - -run_id - - [VARCHAR(250)] - -task_id - - [VARCHAR(250)] - -try_number - - [INTEGER] + +log + +id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + +dttm + + [TIMESTAMP] + +event + + [VARCHAR(60)] + +execution_date + + [TIMESTAMP] + +extra + + [TEXT] + +map_index + + [INTEGER] + +owner + + [VARCHAR(500)] + +owner_display_name + + [VARCHAR(500)] + +run_id + + [VARCHAR(250)] + +task_id + + [VARCHAR(250)] + +try_number + + [INTEGER] slot_pool - -slot_pool - -id - - [INTEGER] - NOT NULL - -description - - [TEXT] - -include_deferred - - [BOOLEAN] - NOT NULL - -pool - - [VARCHAR(256)] - -slots - - [INTEGER] + +slot_pool + +id + + [INTEGER] + NOT NULL + +description + + [TEXT] + +include_deferred + + [BOOLEAN] + NOT NULL + +pool + + [VARCHAR(256)] + +slots + + [INTEGER] callback_request - -callback_request - -id - - [INTEGER] - NOT NULL - -callback_data - - [JSON] - NOT NULL - -callback_type - - [VARCHAR(20)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -priority_weight - - [INTEGER] - NOT NULL - -processor_subdir - - [VARCHAR(2000)] + +callback_request + +id + + [INTEGER] + NOT NULL + +callback_data + + [JSON] + NOT NULL + +callback_type + + [VARCHAR(20)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +priority_weight + + [INTEGER] + NOT NULL + +processor_subdir + + [VARCHAR(2000)] dag_priority_parsing_request 
[... the bulk of the regenerated ERD SVG markup (docs/apache-airflow/img/airflow_erd.svg) is elided here for readability: the updated diagram adds the new dag_version table (id, created_at, dag_id, version_name, version_number) and links it via dag_version_id foreign keys to dag, dag_run, dag_code, serialized_dag, task_instance and task_instance_history ...]
[INTEGER] - NOT NULL - -name - - [VARCHAR(250)] - NOT NULL + +ab_view_menu + +id + + [INTEGER] + NOT NULL + +name + + [VARCHAR(250)] + NOT NULL - + ab_view_menu--ab_permission_view - -0..N -{0,1} + +0..N +{0,1} - + ab_role - -ab_role - -id - - [INTEGER] - NOT NULL - -name - - [VARCHAR(64)] - NOT NULL + +ab_role + +id + + [INTEGER] + NOT NULL + +name + + [VARCHAR(64)] + NOT NULL - + ab_role--ab_user_role - -0..N -{0,1} + +0..N +{0,1} - + ab_role--ab_permission_view_role - -0..N -{0,1} + +0..N +{0,1} - + alembic_version_fab - -alembic_version_fab - -version_num - - [VARCHAR(32)] - NOT NULL + +alembic_version_fab + +version_num + + [VARCHAR(32)] + NOT NULL diff --git a/docs/apache-airflow/migrations-ref.rst b/docs/apache-airflow/migrations-ref.rst index 61dde39958e2..bc73f387a2d2 100644 --- a/docs/apache-airflow/migrations-ref.rst +++ b/docs/apache-airflow/migrations-ref.rst @@ -39,7 +39,9 @@ Here's the list of all the Database Migrations that are executed via when you ru +-------------------------+------------------+-------------------+--------------------------------------------------------------+ | Revision ID | Revises ID | Airflow Version | Description | +=========================+==================+===================+==============================================================+ -| ``d03e4a635aa3`` (head) | ``d8cd3297971e`` | ``3.0.0`` | Drop DAG pickling. | +| ``2b47dc6bc8df`` (head) | ``d03e4a635aa3`` | ``3.0.0`` | add dag versioning. | ++-------------------------+------------------+-------------------+--------------------------------------------------------------+ +| ``d03e4a635aa3`` | ``d8cd3297971e`` | ``3.0.0`` | Drop DAG pickling. | +-------------------------+------------------+-------------------+--------------------------------------------------------------+ | ``d8cd3297971e`` | ``5f57a45b8433`` | ``3.0.0`` | Add last_heartbeat_at directly to TI. 
| +-------------------------+------------------+-------------------+--------------------------------------------------------------+ diff --git a/hatch_build.py b/hatch_build.py index 91b9256b4d03..5e95775c62f2 100644 --- a/hatch_build.py +++ b/hatch_build.py @@ -431,6 +431,7 @@ # The issue tracking it is https://github.com/apache/airflow/issues/28723 "sqlalchemy>=1.4.36,<2.0", "sqlalchemy-jsonfield>=1.0", + "sqlalchemy-utils>=0.41.2", "tabulate>=0.7.5", "tenacity>=8.0.0,!=8.2.0", "termcolor>=1.1.0", diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py index 05bac3394ef0..c1f3e71f306b 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py @@ -138,6 +138,7 @@ def _create_dag(self, dag_id): session.add(dag_instance) dag = DAG(dag_id=dag_id, schedule=None, params={"validated_number": Param(1, minimum=1, maximum=10)}) self.app.dag_bag.bag_dag(dag) + self.app.dag_bag.sync_to_db() return dag_instance def _create_test_dag_run(self, state=DagRunState.RUNNING, extra_dag=False, commit=True, idx_start=1): diff --git a/scripts/ci/pre_commit/check_ti_vs_tis_attributes.py b/scripts/ci/pre_commit/check_ti_vs_tis_attributes.py index 16c1df48a9e8..d1782e629074 100755 --- a/scripts/ci/pre_commit/check_ti_vs_tis_attributes.py +++ b/scripts/ci/pre_commit/check_ti_vs_tis_attributes.py @@ -54,6 +54,7 @@ def compare_attributes(path1, path2): "rendered_task_instance_fields", # Storing last heartbeat for historic TIs is not interesting/useful "last_heartbeat_at", + "dag_version", } # exclude attrs not necessary to be in TaskInstanceHistory if not diff: return diff --git a/task_sdk/src/airflow/sdk/definitions/dag.py b/task_sdk/src/airflow/sdk/definitions/dag.py index 479c1ea09b80..9a124d237ed5 100644 --- a/task_sdk/src/airflow/sdk/definitions/dag.py +++ b/task_sdk/src/airflow/sdk/definitions/dag.py @@ -355,6 +355,7 @@ class DAG: **Warning**: A fail stop dag can only have tasks with the default trigger rule ("all_success"). An exception will be thrown if any task in a fail stop dag has a non default trigger rule. :param dag_display_name: The display name of the DAG which appears on the UI. + :param version_name: The version name of the DAG. This is used to identify the version of the DAG. """ __serialized_fields: ClassVar[frozenset[str] | None] = None @@ -437,6 +438,10 @@ class DAG: has_on_success_callback: bool = attrs.field(init=False) has_on_failure_callback: bool = attrs.field(init=False) + version_name: str | None = attrs.field( + default=None, + validator=attrs.validators.optional(attrs.validators.instance_of(str)), + ) def __attrs_post_init__(self): from airflow.utils import timezone @@ -1063,6 +1068,7 @@ def dag( auto_register: bool = True, fail_stop: bool = False, dag_display_name: str | None = None, + version_name: str | None = None, ) -> Callable[[Callable], Callable[..., DAG]]: """ Python dag decorator which wraps a function into an Airflow DAG. 
diff --git a/tests/api_connexion/endpoints/test_dag_run_endpoint.py b/tests/api_connexion/endpoints/test_dag_run_endpoint.py index 576b28b15353..6bbd63fa8b49 100644 --- a/tests/api_connexion/endpoints/test_dag_run_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_run_endpoint.py @@ -89,6 +89,7 @@ def _create_dag(self, dag_id): session.add(dag_instance) dag = DAG(dag_id=dag_id, schedule=None, params={"validated_number": Param(1, minimum=1, maximum=10)}) self.app.dag_bag.bag_dag(dag) + self.app.dag_bag.sync_to_db() return dag_instance def _create_test_dag_run(self, state=DagRunState.RUNNING, extra_dag=False, commit=True, idx_start=1): @@ -1205,12 +1206,14 @@ def test_raises_validation_error_for_invalid_params(self): assert "Invalid input for param" in response.json["detail"] @mock.patch("airflow.api_connexion.endpoints.dag_run_endpoint.get_airflow_app") - def test_dagrun_creation_exception_is_handled(self, mock_get_app, session): + @mock.patch("airflow.api_connexion.endpoints.dag_run_endpoint.DagVersion") + def test_dagrun_creation_exception_is_handled(self, mock_get_dag_version, mock_get_app, session): self._create_dag("TEST_DAG_ID") error_message = "Encountered Error" mock_get_app.return_value.dag_bag.get_dag.return_value.create_dagrun.side_effect = ValueError( error_message ) + mock_get_dag_version.get_latest_version.return_value = mock.MagicMock() response = self.client.post( "api/v1/dags/TEST_DAG_ID/dagRuns", json={"execution_date": "2020-11-10T08:25:56Z"}, diff --git a/tests/api_connexion/endpoints/test_task_endpoint.py b/tests/api_connexion/endpoints/test_task_endpoint.py index b558f8dbf161..a6e47f4f6a1b 100644 --- a/tests/api_connexion/endpoints/test_task_endpoint.py +++ b/tests/api_connexion/endpoints/test_task_endpoint.py @@ -78,7 +78,6 @@ def setup_dag(self, configured_app): with DAG(self.unscheduled_dag_id, start_date=None, schedule=None) as unscheduled_dag: task4 = EmptyOperator(task_id=self.unscheduled_task_id1, params={"is_unscheduled": True}) task5 = EmptyOperator(task_id=self.unscheduled_task_id2, params={"is_unscheduled": True}) - task1 >> task2 task4 >> task5 dag_bag = DagBag(os.devnull, include_examples=False) @@ -87,6 +86,7 @@ def setup_dag(self, configured_app): mapped_dag.dag_id: mapped_dag, unscheduled_dag.dag_id: unscheduled_dag, } + DagBag._sync_to_db(dag_bag.dags) configured_app.dag_bag = dag_bag # type:ignore @staticmethod @@ -246,7 +246,9 @@ def test_unscheduled_task(self): def test_should_respond_200_serialized(self): # Get the dag out of the dagbag before we patch it to an empty one - SerializedDagModel.write_dag(self.app.dag_bag.get_dag(self.dag_id)) + dag = self.app.dag_bag.get_dag(self.dag_id) + dag.sync_to_db() + SerializedDagModel.write_dag(dag) dag_bag = DagBag(os.devnull, include_examples=False, read_dags_from_db=True) patcher = unittest.mock.patch.object(self.app, "dag_bag", dag_bag) diff --git a/tests/cli/commands/test_task_command.py b/tests/cli/commands/test_task_command.py index ed1a2c28754f..50e3d393f810 100644 --- a/tests/cli/commands/test_task_command.py +++ b/tests/cli/commands/test_task_command.py @@ -227,7 +227,7 @@ def test_cli_test_different_path(self, session, tmp_path): .one() ) # confirm that the serialized dag location has not been updated - assert ser_dag.fileloc == orig_file_path.as_posix() + assert ser_dag.dag_version.dag_code.fileloc == orig_file_path.as_posix() assert ser_dag.data["dag"]["_processor_dags_folder"] == orig_dags_folder.as_posix() assert ser_dag.data["dag"]["fileloc"] == orig_file_path.as_posix() assert 
ser_dag.dag._processor_dags_folder == orig_dags_folder.as_posix() diff --git a/tests/dag_processing/test_job_runner.py b/tests/dag_processing/test_job_runner.py index 192a12358e8d..2bf0bcb6dbb0 100644 --- a/tests/dag_processing/test_job_runner.py +++ b/tests/dag_processing/test_job_runner.py @@ -53,6 +53,7 @@ from airflow.jobs.dag_processor_job_runner import DagProcessorJobRunner from airflow.jobs.job import Job from airflow.models import DagBag, DagModel, DbCallbackRequest +from airflow.models.dag_version import DagVersion from airflow.models.dagcode import DagCode from airflow.models.serialized_dag import SerializedDagModel from airflow.utils import timezone @@ -664,13 +665,6 @@ def test_scan_stale_dags(self): ) assert active_dag_count == 1 - serialized_dag_count = ( - session.query(func.count(SerializedDagModel.dag_id)) - .filter(SerializedDagModel.fileloc == test_dag_path) - .scalar() - ) - assert serialized_dag_count == 1 - manager.processor._scan_stale_dags() active_dag_count = ( @@ -682,10 +676,12 @@ def test_scan_stale_dags(self): serialized_dag_count = ( session.query(func.count(SerializedDagModel.dag_id)) - .filter(SerializedDagModel.fileloc == test_dag_path) + .filter(SerializedDagModel.dag_id == dag.dag_id) .scalar() ) - assert serialized_dag_count == 0 + # Deactivating the DagModel should not delete the SerializedDagModel + # SerializedDagModel gives history about Dags + assert serialized_dag_count == 1 @pytest.mark.skip_if_database_isolation_mode # Test is broken in db isolation mode @conf_vars( @@ -1088,10 +1084,12 @@ def test_refresh_dags_dir_deactivates_deleted_zipped_dags(self, tmp_path): with mock.patch("airflow.dag_processing.manager.might_contain_dag", return_value=False): manager.processor._refresh_dag_dir() - # Assert dag removed from SDM - assert not SerializedDagModel.has_dag("test_zip_dag") - # assert code deleted - assert not DagCode.has_dag(dag.fileloc) + # Deleting the python file should not delete SDM for versioning sake + assert SerializedDagModel.has_dag("test_zip_dag") + # assert code not deleted for versioning sake + assert DagCode.has_dag(dag.fileloc) + # assert dagversion was not deleted + assert DagVersion.get_latest_version(dag.dag_id) # assert dag deactivated assert not dag.get_is_active() diff --git a/tests/dag_processing/test_processor.py b/tests/dag_processing/test_processor.py index f117b3ffe458..29aa4e15a388 100644 --- a/tests/dag_processing/test_processor.py +++ b/tests/dag_processing/test_processor.py @@ -161,6 +161,7 @@ def test_execute_on_failure_callbacks_without_dag(self, mock_ti_handle_failure, with create_session() as session: session.query(TaskInstance).delete() dag = dagbag.get_dag("example_branch_operator") + dag.sync_to_db() triggered_by_kwargs = {"triggered_by": DagRunTriggeredByType.TEST} if AIRFLOW_V_3_0_PLUS else {} dagrun = dag.create_dagrun( state=State.RUNNING, diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py index da3ccc201eb4..d0b147a5c372 100644 --- a/tests/jobs/test_scheduler_job.py +++ b/tests/jobs/test_scheduler_job.py @@ -54,6 +54,7 @@ from airflow.models.asset import AssetActive, AssetDagRunQueue, AssetEvent, AssetModel from airflow.models.backfill import Backfill, _create_backfill from airflow.models.dag import DAG, DagModel +from airflow.models.dag_version import DagVersion from airflow.models.dagbag import DagBag from airflow.models.dagrun import DagRun from airflow.models.db_callback_request import DbCallbackRequest @@ -141,11 +142,10 @@ def clean_db(): clear_db_runs() 
clear_db_backfills() clear_db_pools() - clear_db_dags() clear_db_import_errors() clear_db_jobs() clear_db_assets() - # DO NOT try to run clear_db_serialized_dags() here - this will break the tests + # DO NOT try to run clear_db_serialized_dags() or clear_db_dags here - this will break the tests # The tests expect DAGs to be fully loaded here via setUpClass method below @pytest.fixture(autouse=True) @@ -167,9 +167,7 @@ def set_instance_attrs(self, dagbag) -> Generator: # enqueue! self.null_exec: MockExecutor | None = MockExecutor() # Since we don't want to store the code for the DAG defined in this file - with patch("airflow.dag_processing.manager.SerializedDagModel.remove_deleted_dags"), patch( - "airflow.models.dag.DagCode.bulk_sync_to_db" - ): + with patch("airflow.models.serialized_dag.SerializedDagModel.remove_deleted_dags"): yield self.null_exec = None @@ -2875,7 +2873,6 @@ def test_dagrun_root_after_dagrun_unfinished(self, mock_executor): Noted: the DagRun state could be still in running state during CI. """ - clear_db_dags() dag_id = "test_dagrun_states_root_future" dag = self.dagbag.get_dag(dag_id) dag.sync_to_db() @@ -3316,7 +3313,7 @@ def test_verify_integrity_if_dag_not_changed(self, dag_maker): assert tis_count == 1 latest_dag_version = SerializedDagModel.get_latest_version_hash(dr.dag_id, session=session) - assert dr.dag_hash == latest_dag_version + assert dr.dag_version.serialized_dag.dag_hash == latest_dag_version session.rollback() session.close() @@ -3350,7 +3347,7 @@ def test_verify_integrity_if_dag_changed(self, dag_maker): dr = drs[0] dag_version_1 = SerializedDagModel.get_latest_version_hash(dr.dag_id, session=session) - assert dr.dag_hash == dag_version_1 + assert dr.dag_version.serialized_dag.dag_hash == dag_version_1 assert self.job_runner.dagbag.dags == {"test_verify_integrity_if_dag_changed": dag} assert len(self.job_runner.dagbag.dags.get("test_verify_integrity_if_dag_changed").tasks) == 1 @@ -3367,7 +3364,7 @@ def test_verify_integrity_if_dag_changed(self, dag_maker): drs = DagRun.find(dag_id=dag.dag_id, session=session) assert len(drs) == 1 dr = drs[0] - assert dr.dag_hash == dag_version_2 + assert dr.dag_version.serialized_dag.dag_hash == dag_version_2 assert self.job_runner.dagbag.dags == {"test_verify_integrity_if_dag_changed": dag} assert len(self.job_runner.dagbag.dags.get("test_verify_integrity_if_dag_changed").tasks) == 2 @@ -3383,54 +3380,7 @@ def test_verify_integrity_if_dag_changed(self, dag_maker): assert tis_count == 2 latest_dag_version = SerializedDagModel.get_latest_version_hash(dr.dag_id, session=session) - assert dr.dag_hash == latest_dag_version - - session.rollback() - session.close() - - def test_verify_integrity_if_dag_disappeared(self, dag_maker, caplog): - # CleanUp - with create_session() as session: - session.query(SerializedDagModel).filter( - SerializedDagModel.dag_id == "test_verify_integrity_if_dag_disappeared" - ).delete(synchronize_session=False) - - with dag_maker(dag_id="test_verify_integrity_if_dag_disappeared") as dag: - BashOperator(task_id="dummy", bash_command="echo hi") - - scheduler_job = Job() - self.job_runner = SchedulerJobRunner(job=scheduler_job, subdir=os.devnull) - - session = settings.Session() - orm_dag = dag_maker.dag_model - assert orm_dag is not None - - scheduler_job = Job() - self.job_runner = SchedulerJobRunner(job=scheduler_job, subdir=os.devnull) - - self.job_runner.processor_agent = mock.MagicMock() - dag = self.job_runner.dagbag.get_dag("test_verify_integrity_if_dag_disappeared", session=session) - 
self.job_runner._create_dag_runs([orm_dag], session) - dag_id = dag.dag_id - drs = DagRun.find(dag_id=dag_id, session=session) - assert len(drs) == 1 - dr = drs[0] - - dag_version_1 = SerializedDagModel.get_latest_version_hash(dag_id, session=session) - assert dr.dag_hash == dag_version_1 - assert self.job_runner.dagbag.dags == {"test_verify_integrity_if_dag_disappeared": dag} - assert len(self.job_runner.dagbag.dags.get("test_verify_integrity_if_dag_disappeared").tasks) == 1 - - SerializedDagModel.remove_dag(dag_id=dag_id) - dag = self.job_runner.dagbag.dags[dag_id] - self.job_runner.dagbag.dags = MagicMock() - self.job_runner.dagbag.dags.get.side_effect = [dag, None] - session.flush() - with caplog.at_level(logging.WARNING): - callback = self.job_runner._schedule_dag_run(dr, session) - assert "The DAG disappeared before verifying integrity" in caplog.text - - assert callback is None + assert dr.dag_version.serialized_dag.dag_hash == latest_dag_version session.rollback() session.close() @@ -4015,6 +3965,7 @@ def test_create_dag_runs_assets(self, session, dag_maker): - That the run created is on QUEUED State - That dag_model has next_dagrun """ + clear_db_dags() asset1 = Asset(uri="ds1") asset2 = Asset(uri="ds2") @@ -4386,6 +4337,7 @@ def test_do_schedule_max_active_runs_dag_timed_out(self, dag_maker): session = settings.Session() data_interval = dag.infer_automated_data_interval(DEFAULT_LOGICAL_DATE) triggered_by_kwargs = {"triggered_by": DagRunTriggeredByType.TEST} if AIRFLOW_V_3_0_PLUS else {} + dag_version = DagVersion.get_latest_version(dag.dag_id) run1 = dag.create_dagrun( run_type=DagRunType.SCHEDULED, execution_date=DEFAULT_DATE, @@ -4393,6 +4345,7 @@ def test_do_schedule_max_active_runs_dag_timed_out(self, dag_maker): start_date=timezone.utcnow() - timedelta(seconds=2), session=session, data_interval=data_interval, + dag_version=dag_version, **triggered_by_kwargs, ) @@ -4405,6 +4358,7 @@ def test_do_schedule_max_active_runs_dag_timed_out(self, dag_maker): state=State.QUEUED, session=session, data_interval=data_interval, + dag_version=dag_version, **triggered_by_kwargs, ) @@ -4602,10 +4556,8 @@ def test_do_schedule_max_active_runs_and_manual_trigger(self, dag_maker, mock_ex BashOperator(task_id="dummy3", bash_command="true") session = settings.Session() - dag_run = dag_maker.create_dagrun( - state=State.QUEUED, - session=session, - ) + dag_version = DagVersion.get_latest_version(dag.dag_id) + dag_run = dag_maker.create_dagrun(state=State.QUEUED, session=session, dag_version=dag_version) dag.sync_to_db(session=session) # Update the date fields @@ -4647,23 +4599,31 @@ def test_max_active_runs_in_a_dag_doesnt_stop_running_dag_runs_in_other_dags(sel start_date=DEFAULT_DATE, schedule=timedelta(hours=1), max_active_runs=1, - ): + ) as dag: EmptyOperator(task_id="mytask") - - dr = dag_maker.create_dagrun(run_type=DagRunType.SCHEDULED, state=State.QUEUED) + dag_version = DagVersion.get_latest_version(dag.dag_id) + dr = dag_maker.create_dagrun( + run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) for _ in range(29): - dr = dag_maker.create_dagrun_after(dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED) + dr = dag_maker.create_dagrun_after( + dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) with dag_maker( "test_dag2", start_date=timezone.datetime(2020, 1, 1), schedule=timedelta(hours=1), - ): + ) as dag2: EmptyOperator(task_id="mytask") - - dr = dag_maker.create_dagrun(run_type=DagRunType.SCHEDULED, state=State.QUEUED) + 
dag_version = DagVersion.get_latest_version(dag2.dag_id) + dr = dag_maker.create_dagrun( + run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) for _ in range(9): - dr = dag_maker.create_dagrun_after(dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED) + dr = dag_maker.create_dagrun_after( + dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) scheduler_job = Job() self.job_runner = SchedulerJobRunner(job=scheduler_job, subdir=os.devnull) @@ -4695,20 +4655,29 @@ def test_max_active_runs_in_a_dag_doesnt_prevent_backfill_from_running(self, dag ) as dag: EmptyOperator(task_id="mytask") dag1_dag_id = dag.dag_id - dr = dag_maker.create_dagrun(run_type=DagRunType.SCHEDULED, state=State.QUEUED) + dag_version = DagVersion.get_latest_version(dag1_dag_id) + dr = dag_maker.create_dagrun( + run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) for _ in range(29): - dr = dag_maker.create_dagrun_after(dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED) + dr = dag_maker.create_dagrun_after( + dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) with dag_maker( "test_dag2", start_date=timezone.datetime(2020, 1, 1), schedule=timedelta(days=1), - ): + ) as dag: EmptyOperator(task_id="mytask") - - dr = dag_maker.create_dagrun(run_type=DagRunType.SCHEDULED, state=State.QUEUED) + dag_version = DagVersion.get_latest_version(dag.dag_id) + dr = dag_maker.create_dagrun( + run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) for _ in range(9): - dr = dag_maker.create_dagrun_after(dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED) + dr = dag_maker.create_dagrun_after( + dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) scheduler_job = Job() self.job_runner = SchedulerJobRunner(job=scheduler_job, subdir=os.devnull) @@ -4842,19 +4811,30 @@ def _running_counts(): ) dag1_non_b_running, dag1_b_running, total_running = _running_counts() + dag_version = DagVersion.get_latest_version(dag1_dag_id) # now let's create some "normal" dag runs and verify that they can run - dr = dag_maker.create_dagrun(run_type=DagRunType.SCHEDULED, state=State.QUEUED) + dr = dag_maker.create_dagrun( + run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) for _ in range(29): - dr = dag_maker.create_dagrun_after(dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED) + dr = dag_maker.create_dagrun_after( + dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) with dag_maker( "test_dag2", start_date=timezone.datetime(2020, 1, 1), schedule=timedelta(days=1), - ): + ) as dag2: EmptyOperator(task_id="mytask") - dr = dag_maker.create_dagrun(run_type=DagRunType.SCHEDULED, state=State.QUEUED) + + dag_version = DagVersion.get_latest_version(dag2.dag_id) + dr = dag_maker.create_dagrun( + run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) for _ in range(9): - dr = dag_maker.create_dagrun_after(dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED) + dr = dag_maker.create_dagrun_after( + dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) # initial state -- nothing is running assert dag1_non_b_running == 0 @@ -4982,19 +4962,30 @@ def _running_counts(): assert session.scalar(select(func.count()).select_from(DagRun)) == 6 assert session.scalar(select(func.count()).where(DagRun.dag_id == dag1_dag_id)) == 6 + dag_version = DagVersion.get_latest_version(dag1_dag_id) # now let's 
create some "normal" dag runs and verify that they can run - dr = dag_maker.create_dagrun(run_type=DagRunType.SCHEDULED, state=State.QUEUED) + dr = dag_maker.create_dagrun( + run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) for _ in range(29): - dr = dag_maker.create_dagrun_after(dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED) + dr = dag_maker.create_dagrun_after( + dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) with dag_maker( "test_dag2", start_date=timezone.datetime(2020, 1, 1), schedule=timedelta(days=1), - ): + ) as dag2: EmptyOperator(task_id="mytask") - dr = dag_maker.create_dagrun(run_type=DagRunType.SCHEDULED, state=State.QUEUED) + + dag_version = DagVersion.get_latest_version(dag2.dag_id) + dr = dag_maker.create_dagrun( + run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) for _ in range(9): - dr = dag_maker.create_dagrun_after(dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED) + dr = dag_maker.create_dagrun_after( + dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) # ok at this point, there are new dag runs created, but no new running runs dag1_non_b_running, dag1_b_running, total_running = _running_counts() @@ -5130,9 +5121,14 @@ def test_start_queued_dagruns_do_follow_execution_date_order(self, dag_maker): with dag_maker("test_dag1", max_active_runs=1): EmptyOperator(task_id="mytask") date = DEFAULT_DATE + dag_version = DagVersion.get_latest_version("test_dag1") for i in range(30): dr = dag_maker.create_dagrun( - run_id=f"dagrun_{i}", run_type=DagRunType.SCHEDULED, state=State.QUEUED, execution_date=date + run_id=f"dagrun_{i}", + run_type=DagRunType.SCHEDULED, + state=State.QUEUED, + execution_date=date, + dag_version=dag_version, ) date = dr.execution_date + timedelta(hours=1) scheduler_job = Job() @@ -5175,11 +5171,15 @@ def test_no_dagruns_would_stuck_in_running(self, dag_maker): with dag_maker("test_dagrun_states_are_correct_1", max_active_runs=1, start_date=date) as dag: task1 = EmptyOperator(task_id="dummy_task") - dr1_running = dag_maker.create_dagrun(run_id="dr1_run_1", execution_date=date) + dag_version = DagVersion.get_latest_version(dag.dag_id) + dr1_running = dag_maker.create_dagrun( + run_id="dr1_run_1", execution_date=date, dag_version=dag_version + ) data_interval = dag.infer_automated_data_interval(logical_date) dag_maker.create_dagrun( run_id="dr1_run_2", state=State.QUEUED, + dag_version=dag_version, execution_date=dag.next_dagrun_info( last_automated_dagrun=data_interval, restricted=False ).data_interval.start, @@ -5188,26 +5188,48 @@ def test_no_dagruns_would_stuck_in_running(self, dag_maker): date = timezone.datetime(2020, 1, 1) with dag_maker("test_dagrun_states_are_correct_2", start_date=date) as dag: EmptyOperator(task_id="dummy_task") + dag_version = DagVersion.get_latest_version(dag.dag_id) for i in range(16): - dr = dag_maker.create_dagrun(run_id=f"dr2_run_{i+1}", state=State.RUNNING, execution_date=date) + dr = dag_maker.create_dagrun( + run_id=f"dr2_run_{i+1}", + state=State.RUNNING, + execution_date=date, + dag_version=dag_version, + ) date = dr.execution_date + timedelta(hours=1) dr16 = DagRun.find(run_id="dr2_run_16") date = dr16[0].execution_date + timedelta(hours=1) for i in range(16, 32): - dr = dag_maker.create_dagrun(run_id=f"dr2_run_{i+1}", state=State.QUEUED, execution_date=date) + dr = dag_maker.create_dagrun( + run_id=f"dr2_run_{i+1}", + state=State.QUEUED, + execution_date=date, + 
dag_version=dag_version, + ) date = dr.execution_date + timedelta(hours=1) # third dag and dagruns date = timezone.datetime(2021, 1, 1) with dag_maker("test_dagrun_states_are_correct_3", start_date=date) as dag: EmptyOperator(task_id="dummy_task") + dag_version = DagVersion.get_latest_version(dag.dag_id) for i in range(16): - dr = dag_maker.create_dagrun(run_id=f"dr3_run_{i+1}", state=State.RUNNING, execution_date=date) + dr = dag_maker.create_dagrun( + run_id=f"dr3_run_{i+1}", + state=State.RUNNING, + execution_date=date, + dag_version=dag_version, + ) date = dr.execution_date + timedelta(hours=1) dr16 = DagRun.find(run_id="dr3_run_16") date = dr16[0].execution_date + timedelta(hours=1) for i in range(16, 32): - dr = dag_maker.create_dagrun(run_id=f"dr2_run_{i+1}", state=State.QUEUED, execution_date=date) + dr = dag_maker.create_dagrun( + run_id=f"dr2_run_{i+1}", + state=State.QUEUED, + execution_date=date, + dag_version=dag_version, + ) date = dr.execution_date + timedelta(hours=1) scheduler_job = Job() @@ -5486,11 +5508,17 @@ def test_runs_respected_after_clear(self, dag_maker): self.job_runner = SchedulerJobRunner(job=scheduler_job, subdir=os.devnull) self.job_runner.processor_agent = mock.MagicMock() - + dag_version = DagVersion.get_latest_version(dag.dag_id) session = settings.Session() - dr = dag_maker.create_dagrun(run_type=DagRunType.SCHEDULED, state=State.QUEUED) - dr = dag_maker.create_dagrun_after(dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED) - dag_maker.create_dagrun_after(dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED) + dr = dag_maker.create_dagrun( + run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) + dr = dag_maker.create_dagrun_after( + dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) + dag_maker.create_dagrun_after( + dr, run_type=DagRunType.SCHEDULED, state=State.QUEUED, dag_version=dag_version + ) dag.clear() assert len(DagRun.find(dag_id=dag.dag_id, state=State.QUEUED, session=session)) == 3 @@ -5816,6 +5844,7 @@ def test_find_zombies_handle_failure_callbacks_are_correctly_passed_to_dag_proce assert expected_failure_callback_requests[0] == callback_requests[0] def test_cleanup_stale_dags(self): + clear_db_dags() dagbag = DagBag(TEST_DAG_FOLDER, read_dags_from_db=False) with create_session() as session: dag = dagbag.get_dag("test_example_bash_operator") diff --git a/tests/models/test_dag.py b/tests/models/test_dag.py index e38beb2110ca..fc7a2b24b836 100644 --- a/tests/models/test_dag.py +++ b/tests/models/test_dag.py @@ -62,6 +62,7 @@ dag as dag_decorator, get_asset_triggered_next_run_info, ) +from airflow.models.dag_version import DagVersion from airflow.models.dagrun import DagRun from airflow.models.param import DagParam, Param from airflow.models.serialized_dag import SerializedDagModel @@ -141,14 +142,11 @@ def setup_method(self) -> None: clear_db_runs() clear_db_dags() clear_db_assets() - self.patcher_dag_code = mock.patch("airflow.models.dag.DagCode.bulk_sync_to_db") - self.patcher_dag_code.start() def teardown_method(self) -> None: clear_db_runs() clear_db_dags() clear_db_assets() - self.patcher_dag_code.stop() @staticmethod def _clean_up(dag_id: str): @@ -1039,14 +1037,16 @@ def test_existing_dag_is_paused_config(self): assert dag.max_consecutive_failed_dag_runs == 2 def test_existing_dag_is_paused_after_limit(self): - def add_failed_dag_run(id, execution_date): + def add_failed_dag_run(dag, id, execution_date): triggered_by_kwargs = {"triggered_by": DagRunTriggeredByType.TEST} if 
AIRFLOW_V_3_0_PLUS else {} + dag_v = DagVersion.get_latest_version(dag_id=dag.dag_id) dr = dag.create_dagrun( run_type=DagRunType.MANUAL, run_id="run_id_" + id, execution_date=execution_date, state=State.FAILED, data_interval=(execution_date, execution_date), + dag_version=dag_v, **triggered_by_kwargs, ) ti_op1 = dr.get_task_instance(task_id=op1.task_id, session=session) @@ -1059,14 +1059,16 @@ def add_failed_dag_run(id, execution_date): dag.add_task(op1) session = settings.Session() dag.sync_to_db(session=session) + SerializedDagModel.write_dag(dag) assert not dag.get_is_paused() # dag should be paused after 2 failed dag_runs add_failed_dag_run( + dag, "1", TEST_DATE, ) - add_failed_dag_run("2", TEST_DATE + timedelta(days=1)) + add_failed_dag_run(dag, "2", TEST_DATE + timedelta(days=1)) assert dag.get_is_paused() dag.clear() self._clean_up(dag_id) @@ -1085,8 +1087,7 @@ def test_dag_is_deactivated_upon_dagfile_deletion(self): dag = DAG(dag_id, schedule=None, is_paused_upon_creation=True) dag.fileloc = dag_fileloc session = settings.Session() - with mock.patch("airflow.models.dag.DagCode.bulk_sync_to_db"): - dag.sync_to_db(session=session, processor_subdir="/usr/local/airflow/dags/") + dag.sync_to_db(session=session, processor_subdir="/usr/local/airflow/dags/") orm_dag = session.query(DagModel).filter(DagModel.dag_id == dag_id).one() @@ -2370,9 +2371,8 @@ def test_relative_fileloc_serialized( """ dag = DAG(dag_id="test", schedule=None) dag.fileloc = fileloc - sdm = SerializedDagModel(dag) - session.add(sdm) - session.commit() + dag.sync_to_db() + SerializedDagModel.write_dag(dag) session.expunge_all() sdm = SerializedDagModel.get(dag.dag_id, session) dag = sdm.dag @@ -2383,8 +2383,10 @@ def test__processor_dags_folder(self, session): """Only populated after deserializtion""" dag = DAG(dag_id="test", schedule=None) dag.fileloc = "/abc/test.py" + dag.sync_to_db() assert dag._processor_dags_folder is None - sdm = SerializedDagModel(dag) + SerializedDagModel.write_dag(dag) + sdm = SerializedDagModel.get(dag.dag_id, session) assert sdm.dag._processor_dags_folder == settings.DAGS_FOLDER @pytest.mark.need_serialized_dag diff --git a/tests/models/test_dag_version.py b/tests/models/test_dag_version.py new file mode 100644 index 000000000000..42a33b4b66f1 --- /dev/null +++ b/tests/models/test_dag_version.py @@ -0,0 +1,113 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +import pytest +from sqlalchemy import func, select + +from airflow.models.dag_version import DagVersion +from airflow.models.serialized_dag import SerializedDagModel +from airflow.operators.empty import EmptyOperator + +from tests_common.test_utils.db import clear_db_dags + +pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] + + +class TestDagVersion: + def setup_method(self): + clear_db_dags() + + def teardown_method(self): + clear_db_dags() + + @pytest.mark.need_serialized_dag + def test_writing_dag_version(self, dag_maker, session): + with dag_maker("test_writing_dag_version") as dag: + pass + + latest_version = DagVersion.get_latest_version(dag.dag_id) + assert latest_version.version_number == 1 + assert not latest_version.version_name + assert latest_version.dag_id == dag.dag_id + + @pytest.mark.need_serialized_dag + def test_writing_dag_version_with_version_name(self, dag_maker, session): + version_name = "my_version" + with dag_maker(version_name=version_name) as dag: + pass + + latest_version = DagVersion.get_latest_version(dag.dag_id) + assert latest_version.version_number == 1 + assert latest_version.version_name == version_name + assert latest_version.dag_id == dag.dag_id + + def test_writing_dag_version_with_changes(self, dag_maker, session): + """This also tested the get_latest_version method""" + version_name = "my_version" + with dag_maker("test1", version_name=version_name) as dag: + EmptyOperator(task_id="task1") + dag.sync_to_db() + SerializedDagModel.write_dag(dag) + # Add extra task to change the dag + with dag_maker("test1", version_name=version_name) as dag2: + EmptyOperator(task_id="task1") + EmptyOperator(task_id="task2") + dag2.sync_to_db() + SerializedDagModel.write_dag(dag2) + + latest_version = DagVersion.get_latest_version(dag.dag_id) + assert latest_version.version_number == 2 + assert latest_version.version_name == version_name + assert 2 == session.scalar(select(func.count()).where(DagVersion.dag_id == dag.dag_id)) + + @pytest.mark.need_serialized_dag + def test_get_version(self, dag_maker, session): + """The two dags have the same version name and number but different dag ids""" + version_name = "my_version" + dag1_id = "test1" + with dag_maker(dag1_id, version_name=version_name): + EmptyOperator(task_id="task1") + + with dag_maker("test2", version_name=version_name): + EmptyOperator(task_id="task1") + + with dag_maker("test3"): + EmptyOperator(task_id="task1") + + version = DagVersion.get_version(dag1_id) + assert version.version_number == 1 + assert version.version_name == version_name + assert version.dag_id == dag1_id + assert version.version == "my_version-1" + + @pytest.mark.need_serialized_dag + def test_version_property(self, dag_maker): + version_name = "my_version" + with dag_maker("test1", version_name=version_name) as dag: + pass + + latest_version = DagVersion.get_latest_version(dag.dag_id) + assert latest_version.version == f"{version_name}-1" + + @pytest.mark.need_serialized_dag + def test_version_property_with_null_version_name(self, dag_maker): + with dag_maker("test1") as dag: + pass + + latest_version = DagVersion.get_latest_version(dag.dag_id) + assert latest_version.version == "1" diff --git a/tests/models/test_dagbag.py b/tests/models/test_dagbag.py index d91f6738822a..6915e4df0c1f 100644 --- a/tests/models/test_dagbag.py +++ b/tests/models/test_dagbag.py @@ -64,10 +64,10 @@ def db_clean_up(): class TestDagBag: - def setup_class(self): + def setup_class(cls): 
db_clean_up() - def teardown_class(self): + def teardown_class(cls): db_clean_up() def test_get_existing_dag(self, tmp_path): @@ -723,6 +723,7 @@ def _sync_to_db(): dagbag.sync_to_db(session=session) dag = dagbag.dags["test_example_bash_operator"] + dag.sync_to_db() _sync_to_db() mock_sync_perm_for_dag.assert_called_once_with(dag, session=session) @@ -820,6 +821,7 @@ def test_get_dag_with_dag_serialization(self): with time_machine.travel((tz.datetime(2020, 1, 5, 0, 0, 0)), tick=False): example_bash_op_dag = DagBag(include_examples=True).dags.get("example_bash_operator") + example_bash_op_dag.sync_to_db() SerializedDagModel.write_dag(dag=example_bash_op_dag) dag_bag = DagBag(read_dags_from_db=True) @@ -837,6 +839,7 @@ def test_get_dag_with_dag_serialization(self): # Make a change in the DAG and write Serialized DAG to the DB with time_machine.travel((tz.datetime(2020, 1, 5, 0, 0, 6)), tick=False): example_bash_op_dag.tags.add("new_tag") + example_bash_op_dag.sync_to_db() SerializedDagModel.write_dag(dag=example_bash_op_dag) # Since min_serialized_dag_fetch_interval is passed verify that calling 'dag_bag.get_dag' @@ -852,15 +855,16 @@ def test_get_dag_with_dag_serialization(self): @pytest.mark.skip_if_database_isolation_mode # Does not work in db isolation mode @patch("airflow.models.dagbag.settings.MIN_SERIALIZED_DAG_UPDATE_INTERVAL", 5) @patch("airflow.models.dagbag.settings.MIN_SERIALIZED_DAG_FETCH_INTERVAL", 5) - def test_get_dag_refresh_race_condition(self): + def test_get_dag_refresh_race_condition(self, session): """ Test that DagBag.get_dag correctly refresh the Serialized DAG even if SerializedDagModel.last_updated is before DagBag.dags_last_fetched. """ - + db_clean_up() # serialize the initial version of the DAG with time_machine.travel((tz.datetime(2020, 1, 5, 0, 0, 0)), tick=False): example_bash_op_dag = DagBag(include_examples=True).dags.get("example_bash_operator") + example_bash_op_dag.sync_to_db() SerializedDagModel.write_dag(dag=example_bash_op_dag) # deserialize the DAG @@ -886,6 +890,7 @@ def test_get_dag_refresh_race_condition(self): # long before the transaction is committed with time_machine.travel((tz.datetime(2020, 1, 5, 1, 0, 0)), tick=False): example_bash_op_dag.tags.add("new_tag") + example_bash_op_dag.sync_to_db() SerializedDagModel.write_dag(dag=example_bash_op_dag) # Since min_serialized_dag_fetch_interval is passed verify that calling 'dag_bag.get_dag' @@ -906,6 +911,7 @@ def test_collect_dags_from_db(self): example_dags = dagbag.dags for dag in example_dags.values(): + dag.sync_to_db() SerializedDagModel.write_dag(dag) new_dagbag = DagBag(read_dags_from_db=True) diff --git a/tests/models/test_dagcode.py b/tests/models/test_dagcode.py index 26b29ea8f9c0..fd7d761f9103 100644 --- a/tests/models/test_dagcode.py +++ b/tests/models/test_dagcode.py @@ -17,7 +17,6 @@ # under the License. from __future__ import annotations -from datetime import timedelta from unittest.mock import patch import pytest @@ -25,13 +24,16 @@ import airflow.example_dags as example_dags_module from airflow.exceptions import AirflowException from airflow.models import DagBag +from airflow.models.dag import DAG +from airflow.models.dag_version import DagVersion from airflow.models.dagcode import DagCode +from airflow.models.serialized_dag import SerializedDagModel as SDM # To move it to a shared module. 
from airflow.utils.file import open_maybe_zipped from airflow.utils.session import create_session -from tests_common.test_utils.db import clear_db_dag_code +from tests_common.test_utils.db import clear_db_dag_code, clear_db_dags pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] @@ -39,6 +41,7 @@ def make_example_dags(module): """Loads DAGs from a module for test.""" dagbag = DagBag(module.__path__[0]) + DAG.bulk_write_to_db(dagbag.dags.values()) return dagbag.dags @@ -46,55 +49,35 @@ class TestDagCode: """Unit tests for DagCode.""" def setup_method(self): + clear_db_dags() clear_db_dag_code() def teardown_method(self): + clear_db_dags() clear_db_dag_code() def _write_two_example_dags(self): example_dags = make_example_dags(example_dags_module) bash_dag = example_dags["example_bash_operator"] - DagCode(bash_dag.fileloc).sync_to_db() + dag_version = DagVersion.get_latest_version("example_bash_operator") + DagCode(dag_version, bash_dag.fileloc).sync_to_db() xcom_dag = example_dags["example_xcom"] - DagCode(xcom_dag.fileloc).sync_to_db() + dag_version = DagVersion.get_latest_version("example_xcom") + DagCode(dag_version, xcom_dag.fileloc).sync_to_db() return [bash_dag, xcom_dag] def _write_example_dags(self): example_dags = make_example_dags(example_dags_module) for dag in example_dags.values(): - dag.sync_to_db() + SDM.write_dag(dag) return example_dags - def test_sync_to_db(self): + def test_write_to_db(self): """Dg code can be written into database.""" example_dags = self._write_example_dags() self._compare_example_dags(example_dags) - def test_bulk_sync_to_db(self): - """Dg code can be bulk written into database.""" - example_dags = make_example_dags(example_dags_module) - files = [dag.fileloc for dag in example_dags.values()] - with create_session() as session: - DagCode.bulk_sync_to_db(files, session=session) - session.commit() - - self._compare_example_dags(example_dags) - - def test_bulk_sync_to_db_half_files(self): - """Dg code can be bulk written into database.""" - example_dags = make_example_dags(example_dags_module) - files = [dag.fileloc for dag in example_dags.values()] - half_files = files[: len(files) // 2] - with create_session() as session: - DagCode.bulk_sync_to_db(half_files, session=session) - session.commit() - with create_session() as session: - DagCode.bulk_sync_to_db(files, session=session) - session.commit() - - self._compare_example_dags(example_dags) - @patch.object(DagCode, "dag_fileloc_hash") def test_detecting_duplicate_key(self, mock_hash): """Dag code detects duplicate key.""" @@ -112,6 +95,8 @@ def _compare_example_dags(self, example_dags): session.query(DagCode.fileloc, DagCode.fileloc_hash, DagCode.source_code) .filter(DagCode.fileloc == dag.fileloc) .filter(DagCode.fileloc_hash == dag_fileloc_hash) + .order_by(DagCode.last_updated.desc()) + .limit(1) .one() ) @@ -126,7 +111,7 @@ def test_code_can_be_read_when_no_access_to_file(self): Source Code should at least exist in one of DB or File. 
""" example_dag = make_example_dags(example_dags_module).get("example_bash_operator") - example_dag.sync_to_db() + SDM.write_dag(example_dag) # Mock that there is no access to the Dag File with patch("airflow.models.dagcode.open_maybe_zipped") as mock_open: @@ -136,27 +121,50 @@ def test_code_can_be_read_when_no_access_to_file(self): for test_string in ["example_bash_operator", "also_run_this", "run_this_last"]: assert test_string in dag_code - def test_db_code_updated_on_dag_file_change(self): - """Test if DagCode is updated in DB when DAG file is changed""" + def test_db_code_created_on_serdag_change(self, session): + """Test new DagCode is created in DB when DAG file is changed""" example_dag = make_example_dags(example_dags_module).get("example_bash_operator") - example_dag.sync_to_db() - - with create_session() as session: - result = session.query(DagCode).filter(DagCode.fileloc == example_dag.fileloc).one() - - assert result.fileloc == example_dag.fileloc - assert result.source_code is not None + SDM.write_dag(example_dag) - with patch("airflow.models.dagcode.os.path.getmtime") as mock_mtime: - mock_mtime.return_value = (result.last_updated + timedelta(seconds=1)).timestamp() + result = ( + session.query(DagCode) + .filter(DagCode.fileloc == example_dag.fileloc) + .order_by(DagCode.last_updated.desc()) + .limit(1) + .one() + ) - with patch("airflow.models.dagcode.DagCode._get_code_from_file") as mock_code: - mock_code.return_value = "# dummy code" - example_dag.sync_to_db() + assert result.fileloc == example_dag.fileloc + assert result.source_code is not None - with create_session() as session: - new_result = session.query(DagCode).filter(DagCode.fileloc == example_dag.fileloc).one() - - assert new_result.fileloc == example_dag.fileloc - assert new_result.source_code == "# dummy code" - assert new_result.last_updated > result.last_updated + example_dag = make_example_dags(example_dags_module).get("example_bash_operator") + SDM.write_dag(example_dag, processor_subdir="/tmp") + with patch("airflow.models.dagcode.DagCode._get_code_from_file") as mock_code: + mock_code.return_value = "# dummy code" + SDM.write_dag(example_dag) + + new_result = ( + session.query(DagCode) + .filter(DagCode.fileloc == example_dag.fileloc) + .order_by(DagCode.last_updated.desc()) + .limit(1) + .one() + ) + + assert new_result.fileloc == example_dag.fileloc + assert new_result.source_code != result.source_code + assert new_result.last_updated > result.last_updated + + def test_has_dag(self, dag_maker): + """Test has_dag method.""" + with dag_maker("test_has_dag") as dag: + pass + dag.sync_to_db() + SDM.write_dag(dag) + + with dag_maker() as dag2: + pass + dag2.sync_to_db() + SDM.write_dag(dag2) + + assert DagCode.has_dag(dag.fileloc) diff --git a/tests/models/test_dagrun.py b/tests/models/test_dagrun.py index e8889fb10223..dc5a8ab66e56 100644 --- a/tests/models/test_dagrun.py +++ b/tests/models/test_dagrun.py @@ -88,6 +88,7 @@ def _clean_db(): db.clear_db_variables() db.clear_db_assets() db.clear_db_xcom() + db.clear_db_dags() def create_dag_run( self, diff --git a/tests/models/test_serialized_dag.py b/tests/models/test_serialized_dag.py index cdff883760c9..d0bfe37a69cc 100644 --- a/tests/models/test_serialized_dag.py +++ b/tests/models/test_serialized_dag.py @@ -23,7 +23,7 @@ import pendulum import pytest -from sqlalchemy import select +from sqlalchemy import func, select import airflow.example_dags as example_dags_module from airflow.assets import Asset @@ -31,6 +31,7 @@ from airflow.models.dagbag import 
DagBag from airflow.models.dagcode import DagCode from airflow.models.serialized_dag import SerializedDagModel as SDM +from airflow.operators.empty import EmptyOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.serialization.serialized_objects import SerializedDAG from airflow.settings import json @@ -47,6 +48,7 @@ def make_example_dags(module): """Loads DAGs from a module for test.""" dagbag = DagBag(module.__path__[0]) + DAG.bulk_write_to_db(dagbag.dags.values()) return dagbag.dags @@ -61,6 +63,7 @@ class TestSerializedDagModel: ], ) def setup_test_cases(self, request, monkeypatch): + db.clear_db_dags() db.clear_db_serialized_dags() with mock.patch("airflow.models.serialized_dag.COMPRESS_SERIALIZED_DAGS", request.param): yield @@ -86,7 +89,7 @@ def test_write_dag(self): assert SDM.has_dag(dag.dag_id) result = session.query(SDM).filter(SDM.dag_id == dag.dag_id).one() - assert result.fileloc == dag.fileloc + assert result.dag_version.dag_code.fileloc == dag.fileloc # Verifies JSON schema. SerializedDAG.validate_schema(result.data) @@ -98,29 +101,28 @@ def test_serialized_dag_is_updated_if_dag_is_changed(self): dag_updated = SDM.write_dag(dag=example_bash_op_dag) assert dag_updated is True - with create_session() as session: - s_dag = session.get(SDM, example_bash_op_dag.dag_id) + s_dag = SDM.get(example_bash_op_dag.dag_id) - # Test that if DAG is not changed, Serialized DAG is not re-written and last_updated - # column is not updated - dag_updated = SDM.write_dag(dag=example_bash_op_dag) - s_dag_1 = session.get(SDM, example_bash_op_dag.dag_id) + # Test that if DAG is not changed, Serialized DAG is not re-written and last_updated + # column is not updated + dag_updated = SDM.write_dag(dag=example_bash_op_dag) + s_dag_1 = SDM.get(example_bash_op_dag.dag_id) - assert s_dag_1.dag_hash == s_dag.dag_hash - assert s_dag.last_updated == s_dag_1.last_updated - assert dag_updated is False + assert s_dag_1.dag_hash == s_dag.dag_hash + assert s_dag.last_updated == s_dag_1.last_updated + assert dag_updated is False - # Update DAG - example_bash_op_dag.tags.add("new_tag") - assert example_bash_op_dag.tags == {"example", "example2", "new_tag"} + # Update DAG + example_bash_op_dag.tags.add("new_tag") + assert example_bash_op_dag.tags == {"example", "example2", "new_tag"} - dag_updated = SDM.write_dag(dag=example_bash_op_dag) - s_dag_2 = session.get(SDM, example_bash_op_dag.dag_id) + dag_updated = SDM.write_dag(dag=example_bash_op_dag) + s_dag_2 = SDM.get(example_bash_op_dag.dag_id) - assert s_dag.last_updated != s_dag_2.last_updated - assert s_dag.dag_hash != s_dag_2.dag_hash - assert s_dag_2.data["dag"]["tags"] == ["example", "example2", "new_tag"] - assert dag_updated is True + assert s_dag.last_updated != s_dag_2.last_updated + assert s_dag.dag_hash != s_dag_2.dag_hash + assert s_dag_2.data["dag"]["tags"] == ["example", "example2", "new_tag"] + assert dag_updated is True @pytest.mark.skip_if_database_isolation_mode # Does not work in db isolation mode def test_serialized_dag_is_updated_if_processor_subdir_changed(self): @@ -131,12 +133,12 @@ def test_serialized_dag_is_updated_if_processor_subdir_changed(self): assert dag_updated is True with create_session() as session: - s_dag = session.get(SDM, example_bash_op_dag.dag_id) + s_dag = SDM.get(example_bash_op_dag.dag_id) # Test that if DAG is not changed, Serialized DAG is not re-written and last_updated # column is not updated dag_updated = SDM.write_dag(dag=example_bash_op_dag, processor_subdir="/tmp/test") - 
s_dag_1 = session.get(SDM, example_bash_op_dag.dag_id) + s_dag_1 = SDM.get(example_bash_op_dag.dag_id) assert s_dag_1.dag_hash == s_dag.dag_hash assert s_dag.last_updated == s_dag_1.last_updated @@ -145,7 +147,7 @@ def test_serialized_dag_is_updated_if_processor_subdir_changed(self): # Update DAG dag_updated = SDM.write_dag(dag=example_bash_op_dag, processor_subdir="/tmp/other") - s_dag_2 = session.get(SDM, example_bash_op_dag.dag_id) + s_dag_2 = SDM.get(example_bash_op_dag.dag_id) assert s_dag.processor_subdir != s_dag_2.processor_subdir assert dag_updated is True @@ -162,6 +164,19 @@ def test_read_dags(self): assert serialized_dag.dag_id == dag.dag_id assert set(serialized_dag.task_dict) == set(dag.task_dict) + @pytest.mark.skip_if_database_isolation_mode # Does not work in db isolation mode + def test_read_all_dags_only_picks_the_latest_serdags(self, session): + example_dags = self._write_example_dags() + serialized_dags = SDM.read_all_dags() + assert len(example_dags) == len(serialized_dags) + + ex_dags = make_example_dags(example_dags_module) + SDM.write_dag(ex_dags.get("example_bash_operator"), processor_subdir="/tmp/") + serialized_dags2 = SDM.read_all_dags() + sdags = session.query(SDM).all() + # assert only the latest SDM is returned + assert len(sdags) != len(serialized_dags2) + @pytest.mark.skip_if_database_isolation_mode # Does not work in db isolation mode def test_remove_dags_by_id(self): """DAGs can be removed from database.""" @@ -190,26 +205,12 @@ def test_bulk_sync_to_db(self): DAG("dag_2", schedule=None), DAG("dag_3", schedule=None), ] - with assert_queries_count(10): + DAG.bulk_write_to_db(dags) + # we also write to dag_version and dag_code tables + # in dag_version, we search for unique version_name too + with assert_queries_count(24): SDM.bulk_sync_to_db(dags) - @pytest.mark.skip_if_database_isolation_mode # Does not work in db isolation mode - @pytest.mark.parametrize("dag_dependencies_fields", [{"dag_dependencies": None}, {}]) - def test_get_dag_dependencies_default_to_empty(self, dag_dependencies_fields): - """Test a pre-2.1.0 serialized DAG can deserialize DAG dependencies.""" - example_dags = make_example_dags(example_dags_module) - - with create_session() as session: - sdms = [SDM(dag) for dag in example_dags.values()] - # Simulate pre-2.1.0 format. 
- for sdm in sdms: - del sdm.data["dag"]["dag_dependencies"] - sdm.data["dag"].update(dag_dependencies_fields) - session.bulk_save_objects(sdms) - - expected_dependencies = {dag_id: [] for dag_id in example_dags} - assert SDM.get_dag_dependencies() == expected_dependencies - @pytest.mark.skip_if_database_isolation_mode # Does not work in db isolation mode def test_order_of_dag_params_is_stable(self): """ @@ -284,3 +285,31 @@ def get_hash_set(): first_hashes = get_hash_set() # assert that the hashes are the same assert first_hashes == get_hash_set() + + def test_get_latest_serdag_versions(self, dag_maker, session): + # first dag + with dag_maker("dag1") as dag: + EmptyOperator(task_id="task1") + dag.sync_to_db() + SDM.write_dag(dag) + with dag_maker("dag1") as dag: + EmptyOperator(task_id="task1") + EmptyOperator(task_id="task2") + dag.sync_to_db() + SDM.write_dag(dag) + # second dag + with dag_maker("dag2") as dag: + EmptyOperator(task_id="task1") + dag.sync_to_db() + SDM.write_dag(dag) + with dag_maker("dag2") as dag: + EmptyOperator(task_id="task1") + EmptyOperator(task_id="task2") + dag.sync_to_db() + SDM.write_dag(dag) + + # Total serdags should be 4 + assert session.scalar(select(func.count()).select_from(SDM)) == 4 + + latest_versions = SDM.get_latest_serialized_dags(dag_ids=["dag1", "dag2"], session=session) + assert len(latest_versions) == 2 diff --git a/tests/models/test_taskinstance.py b/tests/models/test_taskinstance.py index 8a1df0594e4e..36b2d22f60fd 100644 --- a/tests/models/test_taskinstance.py +++ b/tests/models/test_taskinstance.py @@ -103,7 +103,7 @@ from tests_common.test_utils import db from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS from tests_common.test_utils.config import conf_vars -from tests_common.test_utils.db import clear_db_connections, clear_db_runs +from tests_common.test_utils.db import clear_db_connections, clear_db_dags, clear_db_runs from tests_common.test_utils.mock_operators import MockOperator if AIRFLOW_V_3_0_PLUS: @@ -2992,6 +2992,7 @@ def test_changing_of_asset_when_adrq_is_already_populated(self, dag_maker): Test that when a task that produces asset has ran, that changing the consumer dag asset will not cause primary key blank-out """ + clear_db_dags() from airflow.assets import Asset with dag_maker(schedule=None, serialized=True) as dag1: @@ -4013,6 +4014,7 @@ def test_refresh_from_db(self, create_task_instance): "next_method": None, "updated_at": None, "task_display_name": "Test Refresh from DB Task", + "dag_version_id": None, } # Make sure we aren't missing any new value in our expected_values list. 
expected_keys = {f"task_instance.{key}" for key in expected_values} diff --git a/tests/operators/test_trigger_dagrun.py b/tests/operators/test_trigger_dagrun.py index 52a11d10e5e3..0b40154b21b0 100644 --- a/tests/operators/test_trigger_dagrun.py +++ b/tests/operators/test_trigger_dagrun.py @@ -28,8 +28,6 @@ from airflow.models.dag import DagModel from airflow.models.dagbag import DagBag from airflow.models.dagrun import DagRun -from airflow.models.log import Log -from airflow.models.serialized_dag import SerializedDagModel from airflow.models.taskinstance import TaskInstance from airflow.operators.trigger_dagrun import TriggerDagRunOperator from airflow.settings import TracebackSessionForTests @@ -39,6 +37,8 @@ from airflow.utils.state import DagRunState, State, TaskInstanceState from airflow.utils.types import DagRunType +from tests_common.test_utils.db import clear_db_dags, clear_db_logs, clear_db_runs + pytestmark = pytest.mark.db_test DEFAULT_DATE = datetime(2019, 1, 1, tzinfo=timezone.utc) @@ -82,12 +82,9 @@ def re_sync_triggered_dag_to_db(self, dag, dag_maker): def teardown_method(self): """Cleanup state after testing in DB.""" - with create_session() as session: - session.query(Log).filter(Log.dag_id == TEST_DAG_ID).delete(synchronize_session=False) - for dbmodel in [DagModel, DagRun, TaskInstance, SerializedDagModel]: - session.query(dbmodel).filter(dbmodel.dag_id.in_([TRIGGERED_DAG_ID, TEST_DAG_ID])).delete( - synchronize_session=False - ) + clear_db_logs() + clear_db_runs() + clear_db_dags() # pathlib.Path(self._tmpfile).unlink() diff --git a/tests/sensors/test_external_task_sensor.py b/tests/sensors/test_external_task_sensor.py index 43911c1a41d4..e2246a2f7513 100644 --- a/tests/sensors/test_external_task_sensor.py +++ b/tests/sensors/test_external_task_sensor.py @@ -124,6 +124,7 @@ def add_fake_task_group(self, target_states=None): with self.dag as dag: with TaskGroup(group_id=TEST_TASK_GROUP_ID) as task_group: _ = [EmptyOperator(task_id=f"task{i}") for i in range(len(target_states))] + dag.sync_to_db() SerializedDagModel.write_dag(dag) for idx, task in enumerate(task_group): @@ -146,7 +147,7 @@ def fake_mapped_task(x: int): fake_task() fake_mapped_task.expand(x=list(map_indexes)) - + dag.sync_to_db() SerializedDagModel.write_dag(dag) for task in task_group: diff --git a/tests/utils/test_db_cleanup.py b/tests/utils/test_db_cleanup.py index 47e93c1616d6..5df2c37cddb3 100644 --- a/tests/utils/test_db_cleanup.py +++ b/tests/utils/test_db_cleanup.py @@ -352,6 +352,7 @@ def test_no_models_missing(self): "rendered_task_instance_fields", # foreign key with TI "dag_priority_parsing_request", # Records are purged once per DAG Processing loop, not a # significant source of data. 
+ "dag_version", # self-maintaining } from airflow.utils.db_cleanup import config_dict diff --git a/tests/www/views/test_views_tasks.py b/tests/www/views/test_views_tasks.py index 19caafe55bc6..424fb02979cd 100644 --- a/tests/www/views/test_views_tasks.py +++ b/tests/www/views/test_views_tasks.py @@ -28,9 +28,9 @@ import time_machine from airflow import settings -from airflow.models.dag import DAG, DagModel +from airflow.models.dag import DAG from airflow.models.dagbag import DagBag -from airflow.models.dagcode import DagCode +from airflow.models.serialized_dag import SerializedDagModel from airflow.models.taskinstance import TaskInstance from airflow.models.taskreschedule import TaskReschedule from airflow.models.xcom import XCom @@ -500,7 +500,7 @@ def test_code(admin_client): def test_code_from_db(admin_client): dag = DagBag(include_examples=True).get_dag("example_bash_operator") - DagCode(dag.fileloc, DagCode._get_code_from_file(dag.fileloc)).sync_to_db() + SerializedDagModel.write_dag(dag) url = "code?dag_id=example_bash_operator" resp = admin_client.get(url, follow_redirects=True) check_content_not_in_response("Failed to load DAG file Code", resp) @@ -510,7 +510,7 @@ def test_code_from_db(admin_client): def test_code_from_db_all_example_dags(admin_client): dagbag = DagBag(include_examples=True) for dag in dagbag.dags.values(): - DagCode(dag.fileloc, DagCode._get_code_from_file(dag.fileloc)).sync_to_db() + SerializedDagModel.write_dag(dag) url = "code?dag_id=example_bash_operator" resp = admin_client.get(url, follow_redirects=True) check_content_not_in_response("Failed to load DAG file Code", resp) @@ -614,23 +614,12 @@ def heartbeat(self): return True -@pytest.fixture -def new_id_example_bash_operator(): - dag_id = "example_bash_operator" - test_dag_id = "non_existent_dag" - with create_session() as session: - dag_query = session.query(DagModel).filter(DagModel.dag_id == dag_id) - dag_query.first().tags = [] # To avoid "FOREIGN KEY constraint" error) - with create_session() as session: - dag_query.update({"dag_id": test_dag_id}) - yield test_dag_id - with create_session() as session: - session.query(DagModel).filter(DagModel.dag_id == test_dag_id).update({"dag_id": dag_id}) - - -def test_delete_dag_button_for_dag_on_scheduler_only(admin_client, new_id_example_bash_operator): +def test_delete_dag_button_for_dag_on_scheduler_only(admin_client, dag_maker): + with dag_maker() as dag: + EmptyOperator(task_id="task") + dag.sync_to_db() # The delete-dag URL should be generated correctly - test_dag_id = new_id_example_bash_operator + test_dag_id = dag.dag_id resp = admin_client.get("/", follow_redirects=True) check_content_in_response(f"/delete?dag_id={test_dag_id}", resp) check_content_in_response(f"return confirmDeleteDag(this, '{test_dag_id}')", resp) @@ -1133,6 +1122,7 @@ def test_task_instances(admin_client): "try_number": 0, "unixname": getuser(), "updated_at": DEFAULT_DATE.isoformat(), + "dag_version_id": None, }, "run_after_loop": { "custom_operator_name": None, @@ -1169,6 +1159,7 @@ def test_task_instances(admin_client): "try_number": 0, "unixname": getuser(), "updated_at": DEFAULT_DATE.isoformat(), + "dag_version_id": None, }, "run_this_last": { "custom_operator_name": None, @@ -1205,6 +1196,7 @@ def test_task_instances(admin_client): "try_number": 0, "unixname": getuser(), "updated_at": DEFAULT_DATE.isoformat(), + "dag_version_id": None, }, "runme_0": { "custom_operator_name": None, @@ -1241,6 +1233,7 @@ def test_task_instances(admin_client): "try_number": 0, "unixname": 
getuser(), "updated_at": DEFAULT_DATE.isoformat(), + "dag_version_id": None, }, "runme_1": { "custom_operator_name": None, @@ -1277,6 +1270,7 @@ def test_task_instances(admin_client): "try_number": 0, "unixname": getuser(), "updated_at": DEFAULT_DATE.isoformat(), + "dag_version_id": None, }, "runme_2": { "custom_operator_name": None, @@ -1313,6 +1307,7 @@ def test_task_instances(admin_client): "try_number": 0, "unixname": getuser(), "updated_at": DEFAULT_DATE.isoformat(), + "dag_version_id": None, }, "this_will_skip": { "custom_operator_name": None, @@ -1349,5 +1344,6 @@ def test_task_instances(admin_client): "try_number": 0, "unixname": getuser(), "updated_at": DEFAULT_DATE.isoformat(), + "dag_version_id": None, }, } diff --git a/tests_common/pytest_plugin.py b/tests_common/pytest_plugin.py index 2a35f79de14f..2a14511ebf23 100644 --- a/tests_common/pytest_plugin.py +++ b/tests_common/pytest_plugin.py @@ -884,11 +884,30 @@ def __exit__(self, type, value, traceback): self.serialized_model = SerializedDagModel( dag, processor_subdir=self.dag_model.processor_subdir ) - self.session.merge(self.serialized_model) + sdm = SerializedDagModel.get(dag.dag_id, session=self.session) + from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS + + if AIRFLOW_V_3_0_PLUS and not sdm: + from airflow.models.dag_version import DagVersion + from airflow.models.dagcode import DagCode + + dagv = DagVersion.write_dag( + dag_id=dag.dag_id, + session=self.session, + version_name=dag.version_name, + ) + dag_code = DagCode(dagv, dag.fileloc, "Source") + self.session.merge(dag_code) + self.serialized_model.dag_version = dagv + if self.want_activate_assets: + self._activate_assets() + if sdm: + self.serialized_model = sdm + else: + self.session.merge(self.serialized_model) serialized_dag = self._serialized_dag() self._bag_dag_compat(serialized_dag) - if AIRFLOW_V_3_0_PLUS and self.want_activate_assets: - self._activate_assets() + self.session.flush() else: self._bag_dag_compat(self.dag) @@ -1007,16 +1026,30 @@ def cleanup(self): return # To isolate problems here with problems from elsewhere on the session object self.session.rollback() - - self.session.query(SerializedDagModel).filter( - SerializedDagModel.dag_id.in_(dag_ids) - ).delete(synchronize_session=False) - self.session.query(DagRun).filter(DagRun.dag_id.in_(dag_ids)).delete( - synchronize_session=False, - ) - self.session.query(TaskInstance).filter(TaskInstance.dag_id.in_(dag_ids)).delete( - synchronize_session=False, - ) + from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS + + if AIRFLOW_V_3_0_PLUS: + from airflow.models.dag_version import DagVersion + + self.session.query(DagRun).filter(DagRun.dag_id.in_(dag_ids)).delete( + synchronize_session=False, + ) + self.session.query(TaskInstance).filter(TaskInstance.dag_id.in_(dag_ids)).delete( + synchronize_session=False, + ) + self.session.query(DagVersion).filter(DagVersion.dag_id.in_(dag_ids)).delete( + synchronize_session=False + ) + else: + self.session.query(SerializedDagModel).filter( + SerializedDagModel.dag_id.in_(dag_ids) + ).delete(synchronize_session=False) + self.session.query(DagRun).filter(DagRun.dag_id.in_(dag_ids)).delete( + synchronize_session=False, + ) + self.session.query(TaskInstance).filter(TaskInstance.dag_id.in_(dag_ids)).delete( + synchronize_session=False, + ) self.session.query(XCom).filter(XCom.dag_id.in_(dag_ids)).delete( synchronize_session=False, ) diff --git a/tests_common/test_utils/db.py b/tests_common/test_utils/db.py index d37a8e942e11..14f1007af9a8 100644 --- 
a/tests_common/test_utils/db.py +++ b/tests_common/test_utils/db.py @@ -114,6 +114,7 @@ def clear_db_dags(): session.query(DagTag).delete() session.query(DagOwnerAttributes).delete() session.query(DagModel).delete() + session.query(DagCode).delete() def drop_tables_with_prefix(prefix): From ccd65867387117cd4503715195a877a1ac2892a2 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Tue, 5 Nov 2024 15:26:18 +0100 Subject: [PATCH 040/137] Detect situation where Breeze is installed with both pipx and uv (#43694) When breeze is installed with both - pipx and uv, we do not know which version is available first on the path and self-upgrading breeze might not upgrade the one that is first. Therefore we detect that situation and fail self upgrade with appropriate instructions what to do (recommending leaving uv as faster) --- .../src/airflow_breeze/utils/reinstall.py | 38 +++++++++++++++---- 1 file changed, 31 insertions(+), 7 deletions(-) diff --git a/dev/breeze/src/airflow_breeze/utils/reinstall.py b/dev/breeze/src/airflow_breeze/utils/reinstall.py index 6165c8a30720..6fdf994c6e91 100644 --- a/dev/breeze/src/airflow_breeze/utils/reinstall.py +++ b/dev/breeze/src/airflow_breeze/utils/reinstall.py @@ -37,14 +37,38 @@ def reinstall_breeze(breeze_sources: Path, re_run: bool = True): # Breeze from different sources than originally installed (i.e. when we reinstall airflow # From the current directory. get_console().print(f"\n[info]Reinstalling Breeze from {breeze_sources}\n") - result = subprocess.run(["uv", "tool", "list"], text=True, capture_output=True, check=False) - if result.returncode == 0: - if "apache-airflow-breeze" in result.stdout: - subprocess.check_call( - ["uv", "tool", "install", "--force", "--reinstall", "-e", breeze_sources.as_posix()] - ) - else: + breeze_installed_with_uv = False + breeze_installed_with_pipx = False + result_uv = subprocess.run(["uv", "tool", "list"], text=True, capture_output=True, check=False) + if result_uv.returncode == 0: + if "apache-airflow-breeze" in result_uv.stdout: + breeze_installed_with_uv = True + result_pipx = subprocess.run(["pipx", "list"], text=True, capture_output=True, check=False) + if result_pipx.returncode == 0: + if "apache-airflow-breeze" in result_pipx.stdout: + breeze_installed_with_pipx = True + if breeze_installed_with_uv and breeze_installed_with_pipx: + get_console().print( + "[error]Breeze is installed both with `uv` and `pipx`. This is not supported.[/]\n" + ) + get_console().print( + "[info]Please uninstall Breeze and install it only with one of the methods[/]\n" + "[info]The `uv` installation method is recommended as it is much faster[/]\n" + ) + get_console().print( + "To uninstall Breeze installed with pipx run:\n pipx uninstall apache-airflow-breeze\n" + ) + get_console().print( + "To uninstall Breeze installed with uv run:\n uv tool uninstall apache-airflow-breeze\n" + ) + sys.exit(1) + elif breeze_installed_with_uv: + subprocess.check_call( + ["uv", "tool", "install", "--force", "--reinstall", "-e", breeze_sources.as_posix()] + ) + elif breeze_installed_with_pipx: subprocess.check_call(["pipx", "install", "-e", breeze_sources.as_posix(), "--force"]) + if re_run: # Make sure we don't loop forever if the metadata hash hasn't been updated yet (else it is tricky to # run pre-commit checks via breeze!) 
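As a rough illustration of the detection approach described in the commit message above, the following is a minimal, standalone sketch. It only assumes that `uv tool list` and `pipx list` print the names of their installed tools to stdout; the helper name `installed_with` and the `__main__` block are illustrative and not part of the Breeze code.

from __future__ import annotations

import subprocess


def installed_with(tool_name: str) -> set[str]:
    """Return the subset of {"uv", "pipx"} whose tool listing mentions ``tool_name``."""
    owners: set[str] = set()
    for manager, command in (("uv", ["uv", "tool", "list"]), ("pipx", ["pipx", "list"])):
        try:
            result = subprocess.run(command, text=True, capture_output=True, check=False)
        except FileNotFoundError:
            # This tool manager is not installed at all, so it cannot own the tool.
            continue
        if result.returncode == 0 and tool_name in result.stdout:
            owners.add(manager)
    return owners


if __name__ == "__main__":
    owners = installed_with("apache-airflow-breeze")
    if owners == {"uv", "pipx"}:
        raise SystemExit("Installed with both uv and pipx; uninstall one of them first.")
    print(f"Installed with: {', '.join(sorted(owners)) or 'nothing found'}")

Querying both tool managers explicitly, rather than guessing which executable PATH resolves first, is what allows the self-upgrade to fail early with concrete uninstall instructions instead of silently upgrading the wrong installation.
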
From 58ceb323916d8412dafbcd03c1e99f8c36e12faa Mon Sep 17 00:00:00 2001 From: LIU ZHE YOU <68415893+jason810496@users.noreply.github.com> Date: Tue, 5 Nov 2024 22:30:38 +0800 Subject: [PATCH 041/137] AIP-84 Get Import Error / Get Import Errors (#43637) * AIP-84 Get Import Error * Small adjustments --------- Co-authored-by: pierrejeambrun --- .../endpoints/import_error_endpoint.py | 3 + airflow/api_fastapi/common/parameters.py | 2 + .../core_api/openapi/v1-generated.yaml | 138 +++++++++++ .../core_api/routes/public/__init__.py | 2 + .../core_api/routes/public/import_error.py | 105 +++++++++ .../core_api/serializers/import_error.py | 39 ++++ airflow/ui/openapi-gen/queries/common.ts | 45 ++++ airflow/ui/openapi-gen/queries/prefetch.ts | 54 +++++ airflow/ui/openapi-gen/queries/queries.ts | 67 ++++++ airflow/ui/openapi-gen/queries/suspense.ts | 67 ++++++ .../ui/openapi-gen/requests/schemas.gen.ts | 46 ++++ .../ui/openapi-gen/requests/services.gen.ts | 61 +++++ airflow/ui/openapi-gen/requests/types.gen.ts | 82 +++++++ .../routes/public/test_import_error.py | 219 ++++++++++++++++++ 14 files changed, 930 insertions(+) create mode 100644 airflow/api_fastapi/core_api/routes/public/import_error.py create mode 100644 airflow/api_fastapi/core_api/serializers/import_error.py create mode 100644 tests/api_fastapi/core_api/routes/public/test_import_error.py diff --git a/airflow/api_connexion/endpoints/import_error_endpoint.py b/airflow/api_connexion/endpoints/import_error_endpoint.py index 76b706eac1ae..633dd0bebde5 100644 --- a/airflow/api_connexion/endpoints/import_error_endpoint.py +++ b/airflow/api_connexion/endpoints/import_error_endpoint.py @@ -31,6 +31,7 @@ from airflow.auth.managers.models.resource_details import AccessView, DagDetails from airflow.models.dag import DagModel from airflow.models.errors import ParseImportError +from airflow.utils.api_migration import mark_fastapi_migration_done from airflow.utils.session import NEW_SESSION, provide_session from airflow.www.extensions.init_auth_manager import get_auth_manager @@ -41,6 +42,7 @@ from airflow.auth.managers.models.batch_apis import IsAuthorizedDagRequest +@mark_fastapi_migration_done @security.requires_access_view(AccessView.IMPORT_ERRORS) @provide_session def get_import_error(*, import_error_id: int, session: Session = NEW_SESSION) -> APIResponse: @@ -72,6 +74,7 @@ def get_import_error(*, import_error_id: int, session: Session = NEW_SESSION) -> return import_error_schema.dump(error) +@mark_fastapi_migration_done @security.requires_access_view(AccessView.IMPORT_ERRORS) @format_parameters({"limit": check_limit}) @provide_session diff --git a/airflow/api_fastapi/common/parameters.py b/airflow/api_fastapi/common/parameters.py index bd6501763722..218077ca59e5 100644 --- a/airflow/api_fastapi/common/parameters.py +++ b/airflow/api_fastapi/common/parameters.py @@ -32,6 +32,7 @@ from airflow.models.dag import DagModel, DagTag from airflow.models.dagrun import DagRun from airflow.models.dagwarning import DagWarning, DagWarningType +from airflow.models.errors import ParseImportError from airflow.utils import timezone from airflow.utils.state import DagRunState @@ -158,6 +159,7 @@ class SortParam(BaseParam[str]): "last_run_state": DagRun.state, "last_run_start_date": DagRun.start_date, "connection_id": Connection.conn_id, + "import_error_id": ParseImportError.id, } def __init__( diff --git a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml index e844cbceeb47..28e38884803a 100644 --- 
a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml @@ -1538,6 +1538,105 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /public/importErrors/{import_error_id}: + get: + tags: + - Import Error + summary: Get Import Error + description: Get an import error. + operationId: get_import_error + parameters: + - name: import_error_id + in: path + required: true + schema: + type: integer + title: Import Error Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/ImportErrorResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/importErrors/: + get: + tags: + - Import Error + summary: Get Import Errors + description: Get all import errors. + operationId: get_import_errors + parameters: + - name: limit + in: query + required: false + schema: + type: integer + default: 100 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: string + default: id + title: Order By + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/ImportErrorCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' /public/monitor/health: get: tags: @@ -3454,6 +3553,45 @@ components: - task_instance_states title: HistoricalMetricDataResponse description: Historical Metric Data serializer for responses. + ImportErrorCollectionResponse: + properties: + import_errors: + items: + $ref: '#/components/schemas/ImportErrorResponse' + type: array + title: Import Errors + total_entries: + type: integer + title: Total Entries + type: object + required: + - import_errors + - total_entries + title: ImportErrorCollectionResponse + description: Import Error Collection Response. + ImportErrorResponse: + properties: + import_error_id: + type: integer + title: Import Error Id + timestamp: + type: string + format: date-time + title: Timestamp + filename: + type: string + title: Filename + stack_trace: + type: string + title: Stack Trace + type: object + required: + - import_error_id + - timestamp + - filename + - stack_trace + title: ImportErrorResponse + description: Import Error Response. 
JobResponse: properties: id: diff --git a/airflow/api_fastapi/core_api/routes/public/__init__.py b/airflow/api_fastapi/core_api/routes/public/__init__.py index a443f5a28ae8..68caa2d77545 100644 --- a/airflow/api_fastapi/core_api/routes/public/__init__.py +++ b/airflow/api_fastapi/core_api/routes/public/__init__.py @@ -25,6 +25,7 @@ from airflow.api_fastapi.core_api.routes.public.dag_warning import dag_warning_router from airflow.api_fastapi.core_api.routes.public.dags import dags_router from airflow.api_fastapi.core_api.routes.public.event_logs import event_logs_router +from airflow.api_fastapi.core_api.routes.public.import_error import import_error_router from airflow.api_fastapi.core_api.routes.public.monitor import monitor_router from airflow.api_fastapi.core_api.routes.public.plugins import plugins_router from airflow.api_fastapi.core_api.routes.public.pools import pools_router @@ -43,6 +44,7 @@ public_router.include_router(dag_sources_router) public_router.include_router(dags_router) public_router.include_router(event_logs_router) +public_router.include_router(import_error_router) public_router.include_router(monitor_router) public_router.include_router(dag_warning_router) public_router.include_router(plugins_router) diff --git a/airflow/api_fastapi/core_api/routes/public/import_error.py b/airflow/api_fastapi/core_api/routes/public/import_error.py new file mode 100644 index 000000000000..9007d6ff891b --- /dev/null +++ b/airflow/api_fastapi/core_api/routes/public/import_error.py @@ -0,0 +1,105 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +from fastapi import Depends, HTTPException, status +from sqlalchemy import select +from sqlalchemy.orm import Session +from typing_extensions import Annotated + +from airflow.api_fastapi.common.db.common import ( + get_session, + paginated_select, +) +from airflow.api_fastapi.common.parameters import ( + QueryLimit, + QueryOffset, + SortParam, +) +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc +from airflow.api_fastapi.core_api.serializers.import_error import ( + ImportErrorCollectionResponse, + ImportErrorResponse, +) +from airflow.models.errors import ParseImportError + +import_error_router = AirflowRouter(tags=["Import Error"], prefix="/importErrors") + + +@import_error_router.get( + "/{import_error_id}", + responses=create_openapi_http_exception_doc( + [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN, status.HTTP_404_NOT_FOUND] + ), +) +async def get_import_error( + import_error_id: int, + session: Annotated[Session, Depends(get_session)], +) -> ImportErrorResponse: + """Get an import error.""" + error = session.scalar(select(ParseImportError).where(ParseImportError.id == import_error_id)) + if error is None: + raise HTTPException(404, f"The ImportError with import_error_id: `{import_error_id}` was not found") + + return ImportErrorResponse.model_validate( + error, + from_attributes=True, + ) + + +@import_error_router.get( + "/", + responses=create_openapi_http_exception_doc([status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]), +) +async def get_import_errors( + limit: QueryLimit, + offset: QueryOffset, + order_by: Annotated[ + SortParam, + Depends( + SortParam( + [ + "id", + "import_error_id", + "timestamp", + "filename", + "stacktrace", + ], + ParseImportError, + ).dynamic_depends() + ), + ], + session: Annotated[Session, Depends(get_session)], +) -> ImportErrorCollectionResponse: + """Get all import errors.""" + import_errors_select, total_entries = paginated_select( + select(ParseImportError), + [], + order_by, + offset, + limit, + session, + ) + import_errors = session.scalars(import_errors_select).all() + + return ImportErrorCollectionResponse( + import_errors=[ + ImportErrorResponse.model_validate(error, from_attributes=True) for error in import_errors + ], + total_entries=total_entries, + ) diff --git a/airflow/api_fastapi/core_api/serializers/import_error.py b/airflow/api_fastapi/core_api/serializers/import_error.py new file mode 100644 index 000000000000..ebc65e23eccb --- /dev/null +++ b/airflow/api_fastapi/core_api/serializers/import_error.py @@ -0,0 +1,39 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +from datetime import datetime + +from pydantic import BaseModel, ConfigDict, Field + + +class ImportErrorResponse(BaseModel): + """Import Error Response.""" + + id: int = Field(alias="import_error_id") + timestamp: datetime + filename: str + stacktrace: str = Field(alias="stack_trace") + + model_config = ConfigDict(populate_by_name=True) + + +class ImportErrorCollectionResponse(BaseModel): + """Import Error Collection Response.""" + + import_errors: list[ImportErrorResponse] + total_entries: int diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts index 1248a77ce188..36ea524e0138 100644 --- a/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow/ui/openapi-gen/queries/common.ts @@ -12,6 +12,7 @@ import { DagsService, DashboardService, EventLogService, + ImportErrorService, MonitorService, PluginService, PoolService, @@ -421,6 +422,50 @@ export const UseEventLogServiceGetEventLogsKeyFn = ( }, ]), ]; +export type ImportErrorServiceGetImportErrorDefaultResponse = Awaited< + ReturnType +>; +export type ImportErrorServiceGetImportErrorQueryResult< + TData = ImportErrorServiceGetImportErrorDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useImportErrorServiceGetImportErrorKey = + "ImportErrorServiceGetImportError"; +export const UseImportErrorServiceGetImportErrorKeyFn = ( + { + importErrorId, + }: { + importErrorId: number; + }, + queryKey?: Array, +) => [ + useImportErrorServiceGetImportErrorKey, + ...(queryKey ?? [{ importErrorId }]), +]; +export type ImportErrorServiceGetImportErrorsDefaultResponse = Awaited< + ReturnType +>; +export type ImportErrorServiceGetImportErrorsQueryResult< + TData = ImportErrorServiceGetImportErrorsDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useImportErrorServiceGetImportErrorsKey = + "ImportErrorServiceGetImportErrors"; +export const UseImportErrorServiceGetImportErrorsKeyFn = ( + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: Array, +) => [ + useImportErrorServiceGetImportErrorsKey, + ...(queryKey ?? [{ limit, offset, orderBy }]), +]; export type MonitorServiceGetHealthDefaultResponse = Awaited< ReturnType >; diff --git a/airflow/ui/openapi-gen/queries/prefetch.ts b/airflow/ui/openapi-gen/queries/prefetch.ts index bf6ad800be01..6c41b7a5a1b5 100644 --- a/airflow/ui/openapi-gen/queries/prefetch.ts +++ b/airflow/ui/openapi-gen/queries/prefetch.ts @@ -12,6 +12,7 @@ import { DagsService, DashboardService, EventLogService, + ImportErrorService, MonitorService, PluginService, PoolService, @@ -543,6 +544,59 @@ export const prefetchUseEventLogServiceGetEventLogs = ( tryNumber, }), }); +/** + * Get Import Error + * Get an import error. + * @param data The data for the request. + * @param data.importErrorId + * @returns ImportErrorResponse Successful Response + * @throws ApiError + */ +export const prefetchUseImportErrorServiceGetImportError = ( + queryClient: QueryClient, + { + importErrorId, + }: { + importErrorId: number; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ + importErrorId, + }), + queryFn: () => ImportErrorService.getImportError({ importErrorId }), + }); +/** + * Get Import Errors + * Get all import errors. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns ImportErrorCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseImportErrorServiceGetImportErrors = ( + queryClient: QueryClient, + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ + limit, + offset, + orderBy, + }), + queryFn: () => + ImportErrorService.getImportErrors({ limit, offset, orderBy }), + }); /** * Get Health * @returns HealthInfoSchema Successful Response diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts index 70796be40171..f4b7c41195f7 100644 --- a/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow/ui/openapi-gen/queries/queries.ts @@ -17,6 +17,7 @@ import { DagsService, DashboardService, EventLogService, + ImportErrorService, MonitorService, PluginService, PoolService, @@ -671,6 +672,72 @@ export const useEventLogServiceGetEventLogs = < }) as TData, ...options, }); +/** + * Get Import Error + * Get an import error. + * @param data The data for the request. + * @param data.importErrorId + * @returns ImportErrorResponse Successful Response + * @throws ApiError + */ +export const useImportErrorServiceGetImportError = < + TData = Common.ImportErrorServiceGetImportErrorDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + importErrorId, + }: { + importErrorId: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn( + { importErrorId }, + queryKey, + ), + queryFn: () => + ImportErrorService.getImportError({ importErrorId }) as TData, + ...options, + }); +/** + * Get Import Errors + * Get all import errors. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns ImportErrorCollectionResponse Successful Response + * @throws ApiError + */ +export const useImportErrorServiceGetImportErrors = < + TData = Common.ImportErrorServiceGetImportErrorsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn( + { limit, offset, orderBy }, + queryKey, + ), + queryFn: () => + ImportErrorService.getImportErrors({ limit, offset, orderBy }) as TData, + ...options, + }); /** * Get Health * @returns HealthInfoSchema Successful Response diff --git a/airflow/ui/openapi-gen/queries/suspense.ts b/airflow/ui/openapi-gen/queries/suspense.ts index 4f75c2ba0ce4..2870605672be 100644 --- a/airflow/ui/openapi-gen/queries/suspense.ts +++ b/airflow/ui/openapi-gen/queries/suspense.ts @@ -12,6 +12,7 @@ import { DagsService, DashboardService, EventLogService, + ImportErrorService, MonitorService, PluginService, PoolService, @@ -657,6 +658,72 @@ export const useEventLogServiceGetEventLogsSuspense = < }) as TData, ...options, }); +/** + * Get Import Error + * Get an import error. + * @param data The data for the request. 
+ * @param data.importErrorId + * @returns ImportErrorResponse Successful Response + * @throws ApiError + */ +export const useImportErrorServiceGetImportErrorSuspense = < + TData = Common.ImportErrorServiceGetImportErrorDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + importErrorId, + }: { + importErrorId: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn( + { importErrorId }, + queryKey, + ), + queryFn: () => + ImportErrorService.getImportError({ importErrorId }) as TData, + ...options, + }); +/** + * Get Import Errors + * Get all import errors. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns ImportErrorCollectionResponse Successful Response + * @throws ApiError + */ +export const useImportErrorServiceGetImportErrorsSuspense = < + TData = Common.ImportErrorServiceGetImportErrorsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn( + { limit, offset, orderBy }, + queryKey, + ), + queryFn: () => + ImportErrorService.getImportErrors({ limit, offset, orderBy }) as TData, + ...options, + }); /** * Get Health * @returns HealthInfoSchema Successful Response diff --git a/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow/ui/openapi-gen/requests/schemas.gen.ts index 53272eae2e77..517743af17f6 100644 --- a/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -1740,6 +1740,52 @@ export const $HistoricalMetricDataResponse = { description: "Historical Metric Data serializer for responses.", } as const; +export const $ImportErrorCollectionResponse = { + properties: { + import_errors: { + items: { + $ref: "#/components/schemas/ImportErrorResponse", + }, + type: "array", + title: "Import Errors", + }, + total_entries: { + type: "integer", + title: "Total Entries", + }, + }, + type: "object", + required: ["import_errors", "total_entries"], + title: "ImportErrorCollectionResponse", + description: "Import Error Collection Response.", +} as const; + +export const $ImportErrorResponse = { + properties: { + import_error_id: { + type: "integer", + title: "Import Error Id", + }, + timestamp: { + type: "string", + format: "date-time", + title: "Timestamp", + }, + filename: { + type: "string", + title: "Filename", + }, + stack_trace: { + type: "string", + title: "Stack Trace", + }, + }, + type: "object", + required: ["import_error_id", "timestamp", "filename", "stack_trace"], + title: "ImportErrorResponse", + description: "Import Error Response.", +} as const; + export const $JobResponse = { properties: { id: { diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts index fa5c7739c909..5597b0a6a9cf 100644 --- a/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow/ui/openapi-gen/requests/services.gen.ts @@ -53,6 +53,10 @@ import type { GetEventLogResponse, GetEventLogsData, GetEventLogsResponse, + GetImportErrorData, + GetImportErrorResponse, + GetImportErrorsData, + GetImportErrorsResponse, GetHealthResponse, ListDagWarningsData, ListDagWarningsResponse, @@ -865,6 +869,63 @@ export 
class EventLogService { } } +export class ImportErrorService { + /** + * Get Import Error + * Get an import error. + * @param data The data for the request. + * @param data.importErrorId + * @returns ImportErrorResponse Successful Response + * @throws ApiError + */ + public static getImportError( + data: GetImportErrorData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/importErrors/{import_error_id}", + path: { + import_error_id: data.importErrorId, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } + + /** + * Get Import Errors + * Get all import errors. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns ImportErrorCollectionResponse Successful Response + * @throws ApiError + */ + public static getImportErrors( + data: GetImportErrorsData = {}, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/importErrors/", + query: { + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 422: "Validation Error", + }, + }); + } +} + export class MonitorService { /** * Get Health diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index 909b78dd627e..e3071b64936a 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -416,6 +416,24 @@ export type HistoricalMetricDataResponse = { task_instance_states: airflow__api_fastapi__core_api__serializers__dashboard__TaskInstanceState; }; +/** + * Import Error Collection Response. + */ +export type ImportErrorCollectionResponse = { + import_errors: Array; + total_entries: number; +}; + +/** + * Import Error Response. + */ +export type ImportErrorResponse = { + import_error_id: number; + timestamp: string; + filename: string; + stack_trace: string; +}; + /** * Job serializer for responses. 
*/ @@ -878,6 +896,20 @@ export type GetEventLogsData = { export type GetEventLogsResponse = EventLogCollectionResponse; +export type GetImportErrorData = { + importErrorId: number; +}; + +export type GetImportErrorResponse = ImportErrorResponse; + +export type GetImportErrorsData = { + limit?: number; + offset?: number; + orderBy?: string; +}; + +export type GetImportErrorsResponse = ImportErrorCollectionResponse; + export type GetHealthResponse = HealthInfoSchema; export type ListDagWarningsData = { @@ -1651,6 +1683,56 @@ export type $OpenApiTs = { }; }; }; + "/public/importErrors/{import_error_id}": { + get: { + req: GetImportErrorData; + res: { + /** + * Successful Response + */ + 200: ImportErrorResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/importErrors/": { + get: { + req: GetImportErrorsData; + res: { + /** + * Successful Response + */ + 200: ImportErrorCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; "/public/monitor/health": { get: { res: { diff --git a/tests/api_fastapi/core_api/routes/public/test_import_error.py b/tests/api_fastapi/core_api/routes/public/test_import_error.py new file mode 100644 index 000000000000..4271c05b6b35 --- /dev/null +++ b/tests/api_fastapi/core_api/routes/public/test_import_error.py @@ -0,0 +1,219 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from datetime import datetime, timezone + +import pytest + +from airflow.models.errors import ParseImportError +from airflow.utils.session import provide_session + +from tests_common.test_utils.db import clear_db_import_errors + +pytestmark = pytest.mark.db_test + +FILENAME1 = "test_filename1.py" +FILENAME2 = "test_filename2.py" +FILENAME3 = "Lorem_ipsum.py" +STACKTRACE1 = "test_stacktrace1" +STACKTRACE2 = "test_stacktrace2" +STACKTRACE3 = "Lorem ipsum dolor sit amet, consectetur adipiscing elit." 
+TIMESTAMP1 = datetime(2024, 6, 15, 1, 0, tzinfo=timezone.utc) +TIMESTAMP2 = datetime(2024, 6, 15, 5, 0, tzinfo=timezone.utc) +TIMESTAMP3 = datetime(2024, 6, 15, 3, 0, tzinfo=timezone.utc) +IMPORT_ERROR_NON_EXISTED_ID = 9999 +IMPORT_ERROR_NON_EXISTED_KEY = "non_existed_key" + + +class TestImportErrorEndpoint: + """Common class for /public/importErrors related unit tests.""" + + @staticmethod + def _clear_db(): + clear_db_import_errors() + + @pytest.fixture(autouse=True) + @provide_session + def setup(self, session=None) -> dict[str, ParseImportError]: + """ + Setup method which is run before every test. + """ + self._clear_db() + import_error1 = ParseImportError( + filename=FILENAME1, + stacktrace=STACKTRACE1, + timestamp=TIMESTAMP1, + ) + import_error2 = ParseImportError( + filename=FILENAME2, + stacktrace=STACKTRACE2, + timestamp=TIMESTAMP2, + ) + import_error3 = ParseImportError( + filename=FILENAME3, + stacktrace=STACKTRACE3, + timestamp=TIMESTAMP3, + ) + session.add_all([import_error1, import_error2, import_error3]) + session.commit() + return {FILENAME1: import_error1, FILENAME2: import_error2, FILENAME3: import_error3} + + def teardown_method(self) -> None: + self._clear_db() + + +class TestGetImportError(TestImportErrorEndpoint): + @pytest.mark.parametrize( + "import_error_key, expected_status_code, expected_body", + [ + ( + FILENAME1, + 200, + { + "import_error_id": 1, + "timestamp": TIMESTAMP1, + "filename": FILENAME1, + "stack_trace": STACKTRACE1, + }, + ), + ( + FILENAME2, + 200, + { + "import_error_id": 2, + "timestamp": TIMESTAMP2, + "filename": FILENAME2, + "stack_trace": STACKTRACE2, + }, + ), + (IMPORT_ERROR_NON_EXISTED_KEY, 404, {}), + ], + ) + def test_get_import_error( + self, test_client, setup, import_error_key, expected_status_code, expected_body + ): + import_error: ParseImportError | None = setup.get(import_error_key) + import_error_id = import_error.id if import_error else IMPORT_ERROR_NON_EXISTED_ID + response = test_client.get(f"/public/importErrors/{import_error_id}") + assert response.status_code == expected_status_code + if expected_status_code != 200: + return + expected_json = { + "import_error_id": import_error_id, + "timestamp": expected_body["timestamp"].isoformat().replace("+00:00", "Z"), + "filename": expected_body["filename"], + "stack_trace": expected_body["stack_trace"], + } + assert response.json() == expected_json + + +class TestGetImportErrors(TestImportErrorEndpoint): + @pytest.mark.parametrize( + "query_params, expected_status_code, expected_total_entries, expected_filenames", + [ + ( + {}, + 200, + 3, + [FILENAME1, FILENAME2, FILENAME3], + ), + # offset, limit + ( + {"limit": 1, "offset": 1}, + 200, + 3, + [FILENAME2], + ), + ( + {"limit": 1, "offset": 2}, + 200, + 3, + [FILENAME3], + ), + # order_by + ( + {"order_by": "-filename"}, + 200, + 3, + [FILENAME2, FILENAME1, FILENAME3], + ), + ( + {"order_by": "timestamp"}, + 200, + 3, + [FILENAME1, FILENAME3, FILENAME2], + ), + ( + {"order_by": "import_error_id"}, + 200, + 3, + [FILENAME1, FILENAME2, FILENAME3], + ), + ( + {"order_by": "-import_error_id"}, + 200, + 3, + [FILENAME3, FILENAME2, FILENAME1], + ), + # invalid order_by + ( + {"order_by": "invalid_order_by"}, + 400, + 0, + [], + ), + # combination of query parameters + ( + {"limit": 2, "offset": 1, "order_by": "-filename"}, + 200, + 3, + [FILENAME1, FILENAME3], + ), + ( + {"limit": 1, "offset": 2, "order_by": "-filename"}, + 200, + 3, + [FILENAME3], + ), + ( + {"limit": 5, "offset": 1, "order_by": "timestamp"}, + 200, + 3, + [FILENAME3, 
FILENAME2], + ), + ], + ) + def test_get_import_errors( + self, + test_client, + query_params, + expected_status_code, + expected_total_entries, + expected_filenames, + ): + response = test_client.get("/public/importErrors", params=query_params) + + assert response.status_code == expected_status_code + if expected_status_code != 200: + return + + response_json = response.json() + assert response_json["total_entries"] == expected_total_entries + assert [ + import_error["filename"] for import_error in response_json["import_errors"] + ] == expected_filenames From c96b618b60ed049658470a9696479c0df36957af Mon Sep 17 00:00:00 2001 From: Pierre Jeambrun Date: Tue, 5 Nov 2024 22:40:28 +0800 Subject: [PATCH 042/137] Disable XCom list ordering by execution_date (#43680) * Disable XCom list ordering by execution_date * Update airflow/www/views.py Co-authored-by: Kaxil Naik --------- Co-authored-by: Kaxil Naik --- airflow/www/views.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/airflow/www/views.py b/airflow/www/views.py index cce2ec8b88c8..dd8279b56007 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -3858,6 +3858,17 @@ class XComModelView(AirflowModelView): list_columns = ["key", "value", "timestamp", "dag_id", "task_id", "run_id", "map_index", "execution_date"] base_order = ("dag_run_id", "desc") + order_columns = [ + "key", + "value", + "timestamp", + "dag_id", + "task_id", + "run_id", + "map_index", + # "execution_date", # execution_date sorting is not working and crashing the UI, disabled for now. + ] + base_filters = [["dag_id", DagFilter, list]] formatters_columns = { From 1328d1a03401c229d723697642cada78b06a00db Mon Sep 17 00:00:00 2001 From: Bugra Ozturk Date: Tue, 5 Nov 2024 16:29:17 +0100 Subject: [PATCH 043/137] AIP-84 Migrate post a connection to FastAPI API (#43396) * Migrate Create a Connection to FastAPI * Remove additional duplicate comment * Include password in connection and move dashboard.py to serializers/ui/ * Fix test for password * Include password field to response and redact it, run pre-commit after rebase * Convert redact to field_validator and fix tests * Pass field name into redact * run pre-commit after rebase --- .../endpoints/connection_endpoint.py | 1 + .../core_api/openapi/v1-generated.yaml | 102 ++++++++++++++- .../core_api/routes/public/connections.py | 23 ++++ .../core_api/routes/ui/dashboard.py | 2 +- .../core_api/serializers/connections.py | 25 ++++ .../serializers/{ => ui}/dashboard.py | 0 airflow/ui/openapi-gen/queries/common.ts | 3 + airflow/ui/openapi-gen/queries/queries.ts | 40 ++++++ .../ui/openapi-gen/requests/schemas.gen.ts | 110 +++++++++++++++- .../ui/openapi-gen/requests/services.gen.ts | 27 ++++ airflow/ui/openapi-gen/requests/types.gen.ts | 51 +++++++- .../routes/public/test_connections.py | 119 ++++++++++++++++++ 12 files changed, 496 insertions(+), 7 deletions(-) rename airflow/api_fastapi/core_api/serializers/{ => ui}/dashboard.py (100%) diff --git a/airflow/api_connexion/endpoints/connection_endpoint.py b/airflow/api_connexion/endpoints/connection_endpoint.py index 37c91c44eb69..c0c2fcbf4610 100644 --- a/airflow/api_connexion/endpoints/connection_endpoint.py +++ b/airflow/api_connexion/endpoints/connection_endpoint.py @@ -151,6 +151,7 @@ def patch_connection( return connection_schema.dump(connection) +@mark_fastapi_migration_done @security.requires_access_connection("POST") @provide_session @action_logging( diff --git a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml 
b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml index 28e38884803a..06a041b55daa 100644 --- a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml @@ -1104,6 +1104,49 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + post: + tags: + - Connection + summary: Post Connection + description: Create connection entry. + operationId: post_connection + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectionBody' + responses: + '201': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' /public/dags/{dag_id}/dagRuns/{dag_run_id}: get: tags: @@ -2515,6 +2558,55 @@ components: - status title: BaseInfoSchema description: Base status field for metadatabase and scheduler. + ConnectionBody: + properties: + connection_id: + type: string + title: Connection Id + conn_type: + type: string + title: Conn Type + description: + anyOf: + - type: string + - type: 'null' + title: Description + host: + anyOf: + - type: string + - type: 'null' + title: Host + login: + anyOf: + - type: string + - type: 'null' + title: Login + schema: + anyOf: + - type: string + - type: 'null' + title: Schema + port: + anyOf: + - type: integer + - type: 'null' + title: Port + password: + anyOf: + - type: string + - type: 'null' + title: Password + extra: + anyOf: + - type: string + - type: 'null' + title: Extra + type: object + required: + - connection_id + - conn_type + title: ConnectionBody + description: Connection Serializer for requests body. ConnectionCollectionResponse: properties: connections: @@ -2564,6 +2656,11 @@ components: - type: integer - type: 'null' title: Port + password: + anyOf: + - type: string + - type: 'null' + title: Password extra: anyOf: - type: string @@ -2578,6 +2675,7 @@ components: - login - schema - port + - password - extra title: ConnectionResponse description: Connection serializer for responses. @@ -3545,7 +3643,7 @@ components: dag_run_states: $ref: '#/components/schemas/DAGRunStates' task_instance_states: - $ref: '#/components/schemas/airflow__api_fastapi__core_api__serializers__dashboard__TaskInstanceState' + $ref: '#/components/schemas/airflow__api_fastapi__core_api__serializers__ui__dashboard__TaskInstanceState' type: object required: - dag_run_types @@ -4224,7 +4322,7 @@ components: - git_version title: VersionInfo description: Version information serializer for responses. 
- airflow__api_fastapi__core_api__serializers__dashboard__TaskInstanceState: + airflow__api_fastapi__core_api__serializers__ui__dashboard__TaskInstanceState: properties: no_status: type: integer diff --git a/airflow/api_fastapi/core_api/routes/public/connections.py b/airflow/api_fastapi/core_api/routes/public/connections.py index 8d9f9ddb8ebf..a31c97c91488 100644 --- a/airflow/api_fastapi/core_api/routes/public/connections.py +++ b/airflow/api_fastapi/core_api/routes/public/connections.py @@ -26,10 +26,12 @@ from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc from airflow.api_fastapi.core_api.serializers.connections import ( + ConnectionBody, ConnectionCollectionResponse, ConnectionResponse, ) from airflow.models import Connection +from airflow.utils import helpers connections_router = AirflowRouter(tags=["Connection"], prefix="/connections") @@ -114,3 +116,24 @@ async def get_connections( ], total_entries=total_entries, ) + + +@connections_router.post("/", status_code=201, responses=create_openapi_http_exception_doc([401, 403, 409])) +async def post_connection( + post_body: ConnectionBody, + session: Annotated[Session, Depends(get_session)], +) -> ConnectionResponse: + """Create connection entry.""" + try: + helpers.validate_key(post_body.connection_id, max_length=200) + except Exception as e: + raise HTTPException(400, f"{e}") + + connection = session.scalar(select(Connection).filter_by(conn_id=post_body.connection_id)) + if connection is not None: + raise HTTPException(409, f"Connection with connection_id: `{post_body.connection_id}` already exists") + + connection = Connection(**post_body.model_dump(by_alias=True)) + session.add(connection) + + return ConnectionResponse.model_validate(connection, from_attributes=True) diff --git a/airflow/api_fastapi/core_api/routes/ui/dashboard.py b/airflow/api_fastapi/core_api/routes/ui/dashboard.py index e101ca78be7d..0eeea4d0dc15 100644 --- a/airflow/api_fastapi/core_api/routes/ui/dashboard.py +++ b/airflow/api_fastapi/core_api/routes/ui/dashboard.py @@ -25,7 +25,7 @@ from airflow.api_fastapi.common.parameters import DateTimeQuery from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc -from airflow.api_fastapi.core_api.serializers.dashboard import HistoricalMetricDataResponse +from airflow.api_fastapi.core_api.serializers.ui.dashboard import HistoricalMetricDataResponse from airflow.models.dagrun import DagRun, DagRunType from airflow.models.taskinstance import TaskInstance from airflow.utils.state import DagRunState, TaskInstanceState diff --git a/airflow/api_fastapi/core_api/serializers/connections.py b/airflow/api_fastapi/core_api/serializers/connections.py index 1cc069cac0cb..c5956b6ec517 100644 --- a/airflow/api_fastapi/core_api/serializers/connections.py +++ b/airflow/api_fastapi/core_api/serializers/connections.py @@ -20,10 +20,12 @@ import json from pydantic import BaseModel, Field, field_validator +from pydantic_core.core_schema import ValidationInfo from airflow.utils.log.secrets_masker import redact +# Response Models class ConnectionResponse(BaseModel): """Connection serializer for responses.""" @@ -34,8 +36,16 @@ class ConnectionResponse(BaseModel): login: str | None schema_: str | None = Field(alias="schema") port: int | None + password: str | None extra: str | None + @field_validator("password", mode="after") + @classmethod + def redact_password(cls, v: str | None, field_info: ValidationInfo) -> 
str | None: + if v is None: + return None + return redact(v, field_info.field_name) + @field_validator("extra", mode="before") @classmethod def redact_extra(cls, v: str | None) -> str | None: @@ -55,3 +65,18 @@ class ConnectionCollectionResponse(BaseModel): connections: list[ConnectionResponse] total_entries: int + + +# Request Models +class ConnectionBody(BaseModel): + """Connection Serializer for requests body.""" + + connection_id: str = Field(serialization_alias="conn_id") + conn_type: str + description: str | None = Field(default=None) + host: str | None = Field(default=None) + login: str | None = Field(default=None) + schema_: str | None = Field(None, alias="schema") + port: int | None = Field(default=None) + password: str | None = Field(default=None) + extra: str | None = Field(default=None) diff --git a/airflow/api_fastapi/core_api/serializers/dashboard.py b/airflow/api_fastapi/core_api/serializers/ui/dashboard.py similarity index 100% rename from airflow/api_fastapi/core_api/serializers/dashboard.py rename to airflow/api_fastapi/core_api/serializers/ui/dashboard.py diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts index 36ea524e0138..2ed842201c2e 100644 --- a/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow/ui/openapi-gen/queries/common.ts @@ -682,6 +682,9 @@ export const UseVersionServiceGetVersionKeyFn = (queryKey?: Array) => [ export type BackfillServiceCreateBackfillMutationResult = Awaited< ReturnType >; +export type ConnectionServicePostConnectionMutationResult = Awaited< + ReturnType +>; export type PoolServicePostPoolMutationResult = Awaited< ReturnType >; diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts index f4b7c41195f7..583f14f7711d 100644 --- a/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow/ui/openapi-gen/queries/queries.ts @@ -28,6 +28,7 @@ import { } from "../requests/services.gen"; import { BackfillPostBody, + ConnectionBody, DAGPatchBody, DAGRunPatchBody, DagRunState, @@ -1130,6 +1131,45 @@ export const useBackfillServiceCreateBackfill = < }) as unknown as Promise, ...options, }); +/** + * Post Connection + * Create connection entry. + * @param data The data for the request. + * @param data.requestBody + * @returns ConnectionResponse Successful Response + * @throws ApiError + */ +export const useConnectionServicePostConnection = < + TData = Common.ConnectionServicePostConnectionMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: ConnectionBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: ConnectionBody; + }, + TContext + >({ + mutationFn: ({ requestBody }) => + ConnectionService.postConnection({ + requestBody, + }) as unknown as Promise, + ...options, + }); /** * Post Pool * Create a Pool. 
diff --git a/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow/ui/openapi-gen/requests/schemas.gen.ts index 517743af17f6..c1dc8cd34576 100644 --- a/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -151,6 +151,100 @@ export const $BaseInfoSchema = { description: "Base status field for metadatabase and scheduler.", } as const; +export const $ConnectionBody = { + properties: { + connection_id: { + type: "string", + title: "Connection Id", + }, + conn_type: { + type: "string", + title: "Conn Type", + }, + description: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Description", + }, + host: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Host", + }, + login: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Login", + }, + schema: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Schema", + }, + port: { + anyOf: [ + { + type: "integer", + }, + { + type: "null", + }, + ], + title: "Port", + }, + password: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Password", + }, + extra: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Extra", + }, + }, + type: "object", + required: ["connection_id", "conn_type"], + title: "ConnectionBody", + description: "Connection Serializer for requests body.", +} as const; + export const $ConnectionCollectionResponse = { properties: { connections: { @@ -236,6 +330,17 @@ export const $ConnectionResponse = { ], title: "Port", }, + password: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Password", + }, extra: { anyOf: [ { @@ -257,6 +362,7 @@ export const $ConnectionResponse = { "login", "schema", "port", + "password", "extra", ], title: "ConnectionResponse", @@ -1731,7 +1837,7 @@ export const $HistoricalMetricDataResponse = { $ref: "#/components/schemas/DAGRunStates", }, task_instance_states: { - $ref: "#/components/schemas/airflow__api_fastapi__core_api__serializers__dashboard__TaskInstanceState", + $ref: "#/components/schemas/airflow__api_fastapi__core_api__serializers__ui__dashboard__TaskInstanceState", }, }, type: "object", @@ -2766,7 +2872,7 @@ export const $VersionInfo = { description: "Version information serializer for responses.", } as const; -export const $airflow__api_fastapi__core_api__serializers__dashboard__TaskInstanceState = +export const $airflow__api_fastapi__core_api__serializers__ui__dashboard__TaskInstanceState = { properties: { no_status: { diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts index 5597b0a6a9cf..4eecb848a57c 100644 --- a/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow/ui/openapi-gen/requests/services.gen.ts @@ -41,6 +41,8 @@ import type { GetConnectionResponse, GetConnectionsData, GetConnectionsResponse, + PostConnectionData, + PostConnectionResponse, GetDagRunData, GetDagRunResponse, DeleteDagRunData, @@ -661,6 +663,31 @@ export class ConnectionService { }, }); } + + /** + * Post Connection + * Create connection entry. + * @param data The data for the request. 
+ * @param data.requestBody + * @returns ConnectionResponse Successful Response + * @throws ApiError + */ + public static postConnection( + data: PostConnectionData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "POST", + url: "/public/connections/", + body: data.requestBody, + mediaType: "application/json", + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 409: "Conflict", + 422: "Validation Error", + }, + }); + } } export class DagRunService { diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index e3071b64936a..603a20d09003 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -43,6 +43,21 @@ export type BaseInfoSchema = { status: string | null; }; +/** + * Connection Serializer for requests body. + */ +export type ConnectionBody = { + connection_id: string; + conn_type: string; + description?: string | null; + host?: string | null; + login?: string | null; + schema?: string | null; + port?: number | null; + password?: string | null; + extra?: string | null; +}; + /** * Connection Collection serializer for responses. */ @@ -62,6 +77,7 @@ export type ConnectionResponse = { login: string | null; schema: string | null; port: number | null; + password: string | null; extra: string | null; }; @@ -413,7 +429,7 @@ export type HealthInfoSchema = { export type HistoricalMetricDataResponse = { dag_run_types: DAGRunTypes; dag_run_states: DAGRunStates; - task_instance_states: airflow__api_fastapi__core_api__serializers__dashboard__TaskInstanceState; + task_instance_states: airflow__api_fastapi__core_api__serializers__ui__dashboard__TaskInstanceState; }; /** @@ -651,7 +667,7 @@ export type VersionInfo = { /** * TaskInstance serializer for responses. 
*/ -export type airflow__api_fastapi__core_api__serializers__dashboard__TaskInstanceState = +export type airflow__api_fastapi__core_api__serializers__ui__dashboard__TaskInstanceState = { no_status: number; removed: number; @@ -841,6 +857,12 @@ export type GetConnectionsData = { export type GetConnectionsResponse = ConnectionCollectionResponse; +export type PostConnectionData = { + requestBody: ConnectionBody; +}; + +export type PostConnectionResponse = ConnectionResponse; + export type GetDagRunData = { dagId: string; dagRunId: string; @@ -1512,6 +1534,31 @@ export type $OpenApiTs = { 422: HTTPValidationError; }; }; + post: { + req: PostConnectionData; + res: { + /** + * Successful Response + */ + 201: ConnectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; "/public/dags/{dag_id}/dagRuns/{dag_run_id}": { get: { diff --git a/tests/api_fastapi/core_api/routes/public/test_connections.py b/tests/api_fastapi/core_api/routes/public/test_connections.py index ee9c80219eb1..1dc3cf9d2cd4 100644 --- a/tests/api_fastapi/core_api/routes/public/test_connections.py +++ b/tests/api_fastapi/core_api/routes/public/test_connections.py @@ -169,3 +169,122 @@ def test_should_respond_200( body = response.json() assert body["total_entries"] == expected_total_entries assert [connection["connection_id"] for connection in body["connections"]] == expected_ids + + +class TestPostConnection(TestConnectionEndpoint): + @pytest.mark.parametrize( + "body", + [ + {"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE}, + {"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE, "extra": None}, + {"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE, "extra": "{}"}, + {"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE, "extra": '{"key": "value"}'}, + { + "connection_id": TEST_CONN_ID, + "conn_type": TEST_CONN_TYPE, + "description": "test_description", + "host": "test_host", + "login": "test_login", + "schema": "test_schema", + "port": 8080, + "extra": '{"key": "value"}', + }, + ], + ) + def test_post_should_respond_200(self, test_client, session, body): + response = test_client.post("/public/connections/", json=body) + assert response.status_code == 201 + connection = session.query(Connection).all() + assert len(connection) == 1 + + @pytest.mark.parametrize( + "body", + [ + {"connection_id": "****", "conn_type": TEST_CONN_TYPE}, + {"connection_id": "test()", "conn_type": TEST_CONN_TYPE}, + {"connection_id": "this_^$#is_invalid", "conn_type": TEST_CONN_TYPE}, + {"connection_id": "iam_not@#$_connection_id", "conn_type": TEST_CONN_TYPE}, + ], + ) + def test_post_should_respond_400_for_invalid_conn_id(self, test_client, body): + response = test_client.post("/public/connections/", json=body) + assert response.status_code == 400 + connection_id = body["connection_id"] + assert response.json() == { + "detail": f"The key '{connection_id}' has to be made of " + "alphanumeric characters, dashes, dots and underscores exclusively", + } + + @pytest.mark.parametrize( + "body", + [ + {"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE}, + ], + ) + def test_post_should_respond_already_exist(self, test_client, body): + response = test_client.post("/public/connections/", json=body) + assert response.status_code == 201 + # Another request + response = test_client.post("/public/connections/", json=body) + assert 
response.status_code == 409 + assert response.json() == { + "detail": f"Connection with connection_id: `{TEST_CONN_ID}` already exists", + } + + @pytest.mark.enable_redact + @pytest.mark.parametrize( + "body, expected_response", + [ + ( + {"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE, "password": "test-password"}, + { + "connection_id": TEST_CONN_ID, + "conn_type": TEST_CONN_TYPE, + "description": None, + "extra": None, + "host": None, + "login": None, + "password": "***", + "port": None, + "schema": None, + }, + ), + ( + {"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE, "password": "?>@#+!_%()#"}, + { + "connection_id": TEST_CONN_ID, + "conn_type": TEST_CONN_TYPE, + "description": None, + "extra": None, + "host": None, + "login": None, + "password": "***", + "port": None, + "schema": None, + }, + ), + ( + { + "connection_id": TEST_CONN_ID, + "conn_type": TEST_CONN_TYPE, + "password": "A!rF|0wi$aw3s0m3", + "extra": '{"password": "test-password"}', + }, + { + "connection_id": TEST_CONN_ID, + "conn_type": TEST_CONN_TYPE, + "description": None, + "extra": '{"password": "***"}', + "host": None, + "login": None, + "password": "***", + "port": None, + "schema": None, + }, + ), + ], + ) + def test_post_should_response_201_redacted_password(self, test_client, body, expected_response): + response = test_client.post("/public/connections/", json=body) + assert response.status_code == 201 + assert response.json() == expected_response From 69c1f92eae21ad0516c96298a210597269fddab9 Mon Sep 17 00:00:00 2001 From: Omkar P <45419097+omkar-foss@users.noreply.github.com> Date: Tue, 5 Nov 2024 21:28:11 +0530 Subject: [PATCH 044/137] Migrate the public endpoint Get DAG Stats to FastAPI (#43255) * Migrate public endpoint Get DAG Stats to FastAPI, with main resynced * Re-run static checks * Add newlines to separate entities --- .../endpoints/dag_stats_endpoint.py | 2 + airflow/api_fastapi/common/db/common.py | 5 +- airflow/api_fastapi/common/db/dag_runs.py | 32 ++ airflow/api_fastapi/common/parameters.py | 17 + .../core_api/openapi/v1-generated.yaml | 98 +++++ .../core_api/routes/public/__init__.py | 2 + .../core_api/routes/public/dag_stats.py | 79 ++++ .../core_api/serializers/dag_stats.py | 43 ++ airflow/ui/openapi-gen/queries/common.ts | 17 + airflow/ui/openapi-gen/queries/prefetch.ts | 21 + airflow/ui/openapi-gen/queries/queries.ts | 27 ++ airflow/ui/openapi-gen/queries/suspense.ts | 27 ++ .../ui/openapi-gen/requests/schemas.gen.ts | 56 +++ .../ui/openapi-gen/requests/services.gen.ts | 31 ++ airflow/ui/openapi-gen/requests/types.gen.ts | 61 +++ .../core_api/routes/public/test_dag_stats.py | 416 ++++++++++++++++++ 16 files changed, 933 insertions(+), 1 deletion(-) create mode 100644 airflow/api_fastapi/common/db/dag_runs.py create mode 100644 airflow/api_fastapi/core_api/routes/public/dag_stats.py create mode 100644 airflow/api_fastapi/core_api/serializers/dag_stats.py create mode 100644 tests/api_fastapi/core_api/routes/public/test_dag_stats.py diff --git a/airflow/api_connexion/endpoints/dag_stats_endpoint.py b/airflow/api_connexion/endpoints/dag_stats_endpoint.py index 3b6c6ab8e0df..c4d8701f8d3c 100644 --- a/airflow/api_connexion/endpoints/dag_stats_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_stats_endpoint.py @@ -27,6 +27,7 @@ ) from airflow.auth.managers.models.resource_details import DagAccessEntity from airflow.models.dag import DagRun +from airflow.utils.api_migration import mark_fastapi_migration_done from airflow.utils.session import NEW_SESSION, provide_session 
from airflow.utils.state import DagRunState from airflow.www.extensions.init_auth_manager import get_auth_manager @@ -37,6 +38,7 @@ from airflow.api_connexion.types import APIResponse +@mark_fastapi_migration_done @security.requires_access_dag("GET", DagAccessEntity.RUN) @provide_session def get_dag_stats( diff --git a/airflow/api_fastapi/common/db/common.py b/airflow/api_fastapi/common/db/common.py index 3feb3ba59950..01e1fe532bf6 100644 --- a/airflow/api_fastapi/common/db/common.py +++ b/airflow/api_fastapi/common/db/common.py @@ -65,13 +65,16 @@ def paginated_select( offset: BaseParam | None = None, limit: BaseParam | None = None, session: Session = NEW_SESSION, + return_total_entries: bool = True, ) -> Select: base_select = apply_filters_to_select( base_select, filters, ) - total_entries = get_query_count(base_select, session=session) + total_entries = None + if return_total_entries: + total_entries = get_query_count(base_select, session=session) # TODO: Re-enable when permissions are handled. Readable / writable entities, # for instance: diff --git a/airflow/api_fastapi/common/db/dag_runs.py b/airflow/api_fastapi/common/db/dag_runs.py new file mode 100644 index 000000000000..8f4b02a067fb --- /dev/null +++ b/airflow/api_fastapi/common/db/dag_runs.py @@ -0,0 +1,32 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from __future__ import annotations + +from sqlalchemy import func, select + +from airflow.models.dagrun import DagRun + +dagruns_select_with_state_count = ( + select( + DagRun.dag_id, + DagRun.state, + func.count(DagRun.state), + ) + .group_by(DagRun.dag_id, DagRun.state) + .order_by(DagRun.dag_id) +) diff --git a/airflow/api_fastapi/common/parameters.py b/airflow/api_fastapi/common/parameters.py index 218077ca59e5..64ae9406f08c 100644 --- a/airflow/api_fastapi/common/parameters.py +++ b/airflow/api_fastapi/common/parameters.py @@ -112,6 +112,18 @@ def depends(self, only_active: bool = True) -> _OnlyActiveFilter: return self.set_value(only_active) +class _DagIdsFilter(BaseParam[list[str]]): + """Filter on multi-valued dag_ids param for DagRun.""" + + def to_orm(self, select: Select) -> Select: + if self.value and self.skip_none: + return select.where(DagRun.dag_id.in_(self.value)) + return select + + def depends(self, dag_ids: list[str] = Query(None)) -> _DagIdsFilter: + return self.set_value(dag_ids) + + class _SearchParam(BaseParam[str]): """Search on attribute.""" @@ -325,6 +337,7 @@ def depends(self, dag_id: str | None = None) -> _DagIdFilter: # Common Safe DateTime DateTimeQuery = Annotated[str, AfterValidator(_safe_parse_datetime)] + # DAG QueryLimit = Annotated[_LimitFilter, Depends(_LimitFilter().depends)] QueryOffset = Annotated[_OffsetFilter, Depends(_OffsetFilter().depends)] @@ -339,10 +352,14 @@ def depends(self, dag_id: str | None = None) -> _DagIdFilter: ] QueryTagsFilter = Annotated[_TagsFilter, Depends(_TagsFilter().depends)] QueryOwnersFilter = Annotated[_OwnersFilter, Depends(_OwnersFilter().depends)] + # DagRun QueryLastDagRunStateFilter = Annotated[_LastDagRunStateFilter, Depends(_LastDagRunStateFilter().depends)] +QueryDagIdsFilter = Annotated[_DagIdsFilter, Depends(_DagIdsFilter().depends)] + # DAGWarning QueryDagIdInDagWarningFilter = Annotated[_DagIdFilter, Depends(_DagIdFilter(DagWarning.dag_id).depends)] QueryWarningTypeFilter = Annotated[_WarningTypeFilter, Depends(_WarningTypeFilter().depends)] + # DAGTags QueryDagTagPatternSearch = Annotated[_DagTagNamePatternSearch, Depends(_DagTagNamePatternSearch().depends)] diff --git a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml index 06a041b55daa..5f6090394702 100644 --- a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml @@ -2462,6 +2462,59 @@ paths: application/json: schema: $ref: '#/components/schemas/VersionInfo' + /public/dagStats/: + get: + tags: + - DagStats + summary: Get Dag Stats + description: Get Dag statistics. 
+ operationId: get_dag_stats + parameters: + - name: dag_ids + in: query + required: false + schema: + type: array + items: + type: string + title: Dag Ids + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DagStatsCollectionResponse' + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' components: schemas: AppBuilderMenuItemResponse: @@ -3467,6 +3520,51 @@ components: - asset_triggered title: DagRunType description: Class with DagRun types. + DagStatsCollectionResponse: + properties: + dags: + items: + $ref: '#/components/schemas/DagStatsResponse' + type: array + title: Dags + total_entries: + type: integer + title: Total Entries + type: object + required: + - dags + - total_entries + title: DagStatsCollectionResponse + description: DAG Stats Collection serializer for responses. + DagStatsResponse: + properties: + dag_id: + type: string + title: Dag Id + stats: + items: + $ref: '#/components/schemas/DagStatsStateResponse' + type: array + title: Stats + type: object + required: + - dag_id + - stats + title: DagStatsResponse + description: DAG Stats serializer for responses. + DagStatsStateResponse: + properties: + state: + $ref: '#/components/schemas/DagRunState' + count: + type: integer + title: Count + type: object + required: + - state + - count + title: DagStatsStateResponse + description: DagStatsState serializer for responses. DagTagPydantic: properties: name: diff --git a/airflow/api_fastapi/core_api/routes/public/__init__.py b/airflow/api_fastapi/core_api/routes/public/__init__.py index 68caa2d77545..b7c8affe4a9c 100644 --- a/airflow/api_fastapi/core_api/routes/public/__init__.py +++ b/airflow/api_fastapi/core_api/routes/public/__init__.py @@ -22,6 +22,7 @@ from airflow.api_fastapi.core_api.routes.public.connections import connections_router from airflow.api_fastapi.core_api.routes.public.dag_run import dag_run_router from airflow.api_fastapi.core_api.routes.public.dag_sources import dag_sources_router +from airflow.api_fastapi.core_api.routes.public.dag_stats import dag_stats_router from airflow.api_fastapi.core_api.routes.public.dag_warning import dag_warning_router from airflow.api_fastapi.core_api.routes.public.dags import dags_router from airflow.api_fastapi.core_api.routes.public.event_logs import event_logs_router @@ -54,3 +55,4 @@ public_router.include_router(variables_router) public_router.include_router(variables_router) public_router.include_router(version_router) +public_router.include_router(dag_stats_router) diff --git a/airflow/api_fastapi/core_api/routes/public/dag_stats.py b/airflow/api_fastapi/core_api/routes/public/dag_stats.py new file mode 100644 index 000000000000..deed40f0116d --- /dev/null +++ b/airflow/api_fastapi/core_api/routes/public/dag_stats.py @@ -0,0 +1,79 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +from fastapi import Depends +from sqlalchemy.orm import Session +from typing_extensions import Annotated + +from airflow.api_fastapi.common.db.common import ( + get_session, + paginated_select, +) +from airflow.api_fastapi.common.db.dag_runs import dagruns_select_with_state_count +from airflow.api_fastapi.common.parameters import QueryDagIdsFilter +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc +from airflow.api_fastapi.core_api.serializers.dag_stats import ( + DagStatsCollectionResponse, + DagStatsResponse, + DagStatsStateResponse, +) +from airflow.utils.state import DagRunState + +dag_stats_router = AirflowRouter(tags=["DagStats"], prefix="/dagStats") + + +@dag_stats_router.get( + "/", + responses=create_openapi_http_exception_doc([400, 401, 403, 404]), +) +async def get_dag_stats( + session: Annotated[Session, Depends(get_session)], + dag_ids: QueryDagIdsFilter, +) -> DagStatsCollectionResponse: + """Get Dag statistics.""" + dagruns_select, _ = paginated_select( + base_select=dagruns_select_with_state_count, + filters=[dag_ids], + session=session, + return_total_entries=False, + ) + query_result = session.execute(dagruns_select) + + result_dag_ids = [] + dag_state_data = {} + for dag_id, state, count in query_result: + dag_state_data[(dag_id, state)] = count + if dag_id not in result_dag_ids: + result_dag_ids.append(dag_id) + + dags = [ + DagStatsResponse( + dag_id=dag_id, + stats=[ + DagStatsStateResponse( + state=state, + count=dag_state_data.get((dag_id, state), 0), + ) + for state in DagRunState + ], + ) + for dag_id in result_dag_ids + ] + return DagStatsCollectionResponse(dags=dags, total_entries=len(dags)) diff --git a/airflow/api_fastapi/core_api/serializers/dag_stats.py b/airflow/api_fastapi/core_api/serializers/dag_stats.py new file mode 100644 index 000000000000..0d768c2cbac0 --- /dev/null +++ b/airflow/api_fastapi/core_api/serializers/dag_stats.py @@ -0,0 +1,43 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from __future__ import annotations + +from pydantic import BaseModel + +from airflow.utils.state import DagRunState + + +class DagStatsStateResponse(BaseModel): + """DagStatsState serializer for responses.""" + + state: DagRunState + count: int + + +class DagStatsResponse(BaseModel): + """DAG Stats serializer for responses.""" + + dag_id: str + stats: list[DagStatsStateResponse] + + +class DagStatsCollectionResponse(BaseModel): + """DAG Stats Collection serializer for responses.""" + + dags: list[DagStatsResponse] + total_entries: int diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts index 2ed842201c2e..cec1f0f314dc 100644 --- a/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow/ui/openapi-gen/queries/common.ts @@ -8,6 +8,7 @@ import { DagRunService, DagService, DagSourceService, + DagStatsService, DagWarningService, DagsService, DashboardService, @@ -679,6 +680,22 @@ export const UseVersionServiceGetVersionKeyFn = (queryKey?: Array) => [ useVersionServiceGetVersionKey, ...(queryKey ?? []), ]; +export type DagStatsServiceGetDagStatsDefaultResponse = Awaited< + ReturnType +>; +export type DagStatsServiceGetDagStatsQueryResult< + TData = DagStatsServiceGetDagStatsDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useDagStatsServiceGetDagStatsKey = "DagStatsServiceGetDagStats"; +export const UseDagStatsServiceGetDagStatsKeyFn = ( + { + dagIds, + }: { + dagIds?: string[]; + } = {}, + queryKey?: Array, +) => [useDagStatsServiceGetDagStatsKey, ...(queryKey ?? [{ dagIds }])]; export type BackfillServiceCreateBackfillMutationResult = Awaited< ReturnType >; diff --git a/airflow/ui/openapi-gen/queries/prefetch.ts b/airflow/ui/openapi-gen/queries/prefetch.ts index 6c41b7a5a1b5..04443427aca7 100644 --- a/airflow/ui/openapi-gen/queries/prefetch.ts +++ b/airflow/ui/openapi-gen/queries/prefetch.ts @@ -8,6 +8,7 @@ import { DagRunService, DagService, DagSourceService, + DagStatsService, DagWarningService, DagsService, DashboardService, @@ -875,3 +876,23 @@ export const prefetchUseVersionServiceGetVersion = (queryClient: QueryClient) => queryKey: Common.UseVersionServiceGetVersionKeyFn(), queryFn: () => VersionService.getVersion(), }); +/** + * Get Dag Stats + * Get Dag statistics. + * @param data The data for the request. + * @param data.dagIds + * @returns DagStatsCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagStatsServiceGetDagStats = ( + queryClient: QueryClient, + { + dagIds, + }: { + dagIds?: string[]; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }), + queryFn: () => DagStatsService.getDagStats({ dagIds }), + }); diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts index 583f14f7711d..11dea6f3df58 100644 --- a/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow/ui/openapi-gen/queries/queries.ts @@ -13,6 +13,7 @@ import { DagRunService, DagService, DagSourceService, + DagStatsService, DagWarningService, DagsService, DashboardService, @@ -1093,6 +1094,32 @@ export const useVersionServiceGetVersion = < queryFn: () => VersionService.getVersion() as TData, ...options, }); +/** + * Get Dag Stats + * Get Dag statistics. + * @param data The data for the request. 
+ * @param data.dagIds + * @returns DagStatsCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagStatsServiceGetDagStats = < + TData = Common.DagStatsServiceGetDagStatsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagIds, + }: { + dagIds?: string[]; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }, queryKey), + queryFn: () => DagStatsService.getDagStats({ dagIds }) as TData, + ...options, + }); /** * Create Backfill * @param data The data for the request. diff --git a/airflow/ui/openapi-gen/queries/suspense.ts b/airflow/ui/openapi-gen/queries/suspense.ts index 2870605672be..eed1a0afe805 100644 --- a/airflow/ui/openapi-gen/queries/suspense.ts +++ b/airflow/ui/openapi-gen/queries/suspense.ts @@ -8,6 +8,7 @@ import { DagRunService, DagService, DagSourceService, + DagStatsService, DagWarningService, DagsService, DashboardService, @@ -1078,3 +1079,29 @@ export const useVersionServiceGetVersionSuspense = < queryFn: () => VersionService.getVersion() as TData, ...options, }); +/** + * Get Dag Stats + * Get Dag statistics. + * @param data The data for the request. + * @param data.dagIds + * @returns DagStatsCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagStatsServiceGetDagStatsSuspense = < + TData = Common.DagStatsServiceGetDagStatsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagIds, + }: { + dagIds?: string[]; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }, queryKey), + queryFn: () => DagStatsService.getDagStats({ dagIds }) as TData, + ...options, + }); diff --git a/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow/ui/openapi-gen/requests/schemas.gen.ts index c1dc8cd34576..d64abb3853b4 100644 --- a/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -1580,6 +1580,62 @@ export const $DagRunType = { description: "Class with DagRun types.", } as const; +export const $DagStatsCollectionResponse = { + properties: { + dags: { + items: { + $ref: "#/components/schemas/DagStatsResponse", + }, + type: "array", + title: "Dags", + }, + total_entries: { + type: "integer", + title: "Total Entries", + }, + }, + type: "object", + required: ["dags", "total_entries"], + title: "DagStatsCollectionResponse", + description: "DAG Stats Collection serializer for responses.", +} as const; + +export const $DagStatsResponse = { + properties: { + dag_id: { + type: "string", + title: "Dag Id", + }, + stats: { + items: { + $ref: "#/components/schemas/DagStatsStateResponse", + }, + type: "array", + title: "Stats", + }, + }, + type: "object", + required: ["dag_id", "stats"], + title: "DagStatsResponse", + description: "DAG Stats serializer for responses.", +} as const; + +export const $DagStatsStateResponse = { + properties: { + state: { + $ref: "#/components/schemas/DagRunState", + }, + count: { + type: "integer", + title: "Count", + }, + }, + type: "object", + required: ["state", "count"], + title: "DagStatsStateResponse", + description: "DagStatsState serializer for responses.", +} as const; + export const $DagTagPydantic = { properties: { name: { diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts index 4eecb848a57c..915636fbb53b 
100644 --- a/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow/ui/openapi-gen/requests/services.gen.ts @@ -91,6 +91,8 @@ import type { PostVariableData, PostVariableResponse, GetVersionResponse, + GetDagStatsData, + GetDagStatsResponse, } from "./types.gen"; export class AssetService { @@ -1415,3 +1417,32 @@ export class VersionService { }); } } + +export class DagStatsService { + /** + * Get Dag Stats + * Get Dag statistics. + * @param data The data for the request. + * @param data.dagIds + * @returns DagStatsCollectionResponse Successful Response + * @throws ApiError + */ + public static getDagStats( + data: GetDagStatsData = {}, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/dagStats/", + query: { + dag_ids: data.dagIds, + }, + errors: { + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } +} diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index 603a20d09003..f96acacf6119 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -347,6 +347,30 @@ export type DagRunType = | "manual" | "asset_triggered"; +/** + * DAG Stats Collection serializer for responses. + */ +export type DagStatsCollectionResponse = { + dags: Array; + total_entries: number; +}; + +/** + * DAG Stats serializer for responses. + */ +export type DagStatsResponse = { + dag_id: string; + stats: Array; +}; + +/** + * DagStatsState serializer for responses. + */ +export type DagStatsStateResponse = { + state: DagRunState; + count: number; +}; + /** * Serializable representation of the DagTag ORM SqlAlchemyModel used by internal API. */ @@ -1045,6 +1069,12 @@ export type PostVariableResponse = VariableResponse; export type GetVersionResponse = VersionInfo; +export type GetDagStatsData = { + dagIds?: Array; +}; + +export type GetDagStatsResponse = DagStatsCollectionResponse; + export type $OpenApiTs = { "/ui/next_run_assets/{dag_id}": { get: { @@ -2165,4 +2195,35 @@ export type $OpenApiTs = { }; }; }; + "/public/dagStats/": { + get: { + req: GetDagStatsData; + res: { + /** + * Successful Response + */ + 200: DagStatsCollectionResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; }; diff --git a/tests/api_fastapi/core_api/routes/public/test_dag_stats.py b/tests/api_fastapi/core_api/routes/public/test_dag_stats.py new file mode 100644 index 000000000000..e2611addd70c --- /dev/null +++ b/tests/api_fastapi/core_api/routes/public/test_dag_stats.py @@ -0,0 +1,416 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from datetime import datetime, timedelta + +import pytest + +from airflow.models.dag import DagModel +from airflow.models.dagrun import DagRun +from airflow.utils import timezone +from airflow.utils.state import DagRunState +from airflow.utils.types import DagRunType + +from tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags + +pytestmark = pytest.mark.db_test + +DAG1_ID = "test_dag1" +DAG2_ID = "test_dag2" +DAG3_ID = "test_dag3" +TASK_ID = "op1" +API_PREFIX = "/public/dagStats" + + +class TestDagStatsEndpoint: + default_time = "2020-06-11T18:00:00+00:00" + + @staticmethod + def _clear_db(): + clear_db_runs() + clear_db_dags() + clear_db_serialized_dags() + + def _create_dag_and_runs(self, session=None): + dag_1 = DagModel( + dag_id=DAG1_ID, + fileloc="/tmp/dag_stats_1.py", + timetable_summary="2 2 * * *", + is_active=False, + is_paused=True, + owners="test_owner,another_test_owner", + next_dagrun=datetime(2021, 1, 1, 12, 0, 0, tzinfo=timezone.utc), + ) + dag_1_run_1 = DagRun( + dag_id=DAG1_ID, + run_id="test_dag_run_id_1", + run_type=DagRunType.MANUAL, + execution_date=timezone.parse(self.default_time), + start_date=timezone.parse(self.default_time), + external_trigger=True, + state="running", + ) + dag_1_run_2 = DagRun( + dag_id=dag_1.dag_id, + run_id="test_dag_run_id_2", + run_type=DagRunType.MANUAL, + execution_date=timezone.parse(self.default_time) + timedelta(days=1), + start_date=timezone.parse(self.default_time), + external_trigger=True, + state="failed", + ) + dag_2 = DagModel( + dag_id=DAG2_ID, + fileloc="/tmp/dag_stats_2.py", + timetable_summary="2 2 * * *", + is_active=False, + is_paused=True, + owners="test_owner,another_test_owner", + next_dagrun=datetime(2021, 1, 1, 12, 0, 0, tzinfo=timezone.utc), + ) + dag_2_run_1 = DagRun( + dag_id=dag_2.dag_id, + run_id="test_dag_2_run_id_1", + run_type=DagRunType.MANUAL, + execution_date=timezone.parse(self.default_time), + start_date=timezone.parse(self.default_time), + external_trigger=True, + state="queued", + ) + dag_3 = DagModel( + dag_id=DAG3_ID, + fileloc="/tmp/dag_stats_3.py", + timetable_summary="2 2 * * *", + is_active=False, + is_paused=True, + owners="test_owner,another_test_owner", + next_dagrun=datetime(2021, 1, 1, 12, 0, 0, tzinfo=timezone.utc), + ) + dag_3_run_1 = DagRun( + dag_id=dag_3.dag_id, + run_id="test_dag_3_run_id_1", + run_type=DagRunType.MANUAL, + execution_date=timezone.parse(self.default_time), + start_date=timezone.parse(self.default_time), + external_trigger=True, + state="success", + ) + entities = ( + dag_1, + dag_1_run_1, + dag_1_run_2, + dag_2, + dag_2_run_1, + dag_3, + dag_3_run_1, + ) + session.add_all(entities) + session.commit() + + @pytest.fixture(autouse=True) + def setup(self) -> None: + self._clear_db() + + def teardown_method(self) -> None: + self._clear_db() + + +class TestGetDagStats(TestDagStatsEndpoint): + """Unit tests for Get DAG Stats.""" + + def test_should_respond_200(self, client, session): + self._create_dag_and_runs(session) + exp_payload = { + "dags": [ + { + "dag_id": DAG1_ID, + "stats": [ + { + "state": DagRunState.QUEUED, + "count": 0, + }, + { + "state": DagRunState.RUNNING, + "count": 1, + }, + { + "state": DagRunState.SUCCESS, + "count": 0, + }, + { + "state": DagRunState.FAILED, + "count": 1, + }, + ], + }, + { + "dag_id": DAG2_ID, + "stats": [ + { + "state": DagRunState.QUEUED, + "count": 1, + }, 
+ { + "state": DagRunState.RUNNING, + "count": 0, + }, + { + "state": DagRunState.SUCCESS, + "count": 0, + }, + { + "state": DagRunState.FAILED, + "count": 0, + }, + ], + }, + ], + "total_entries": 2, + } + + response = client().get(f"{API_PREFIX}?dag_ids={DAG1_ID}&dag_ids={DAG2_ID}") + assert response.status_code == 200 + res_json = response.json() + assert res_json["total_entries"] == len(res_json["dags"]) + assert res_json == exp_payload + + def test_all_dags_should_respond_200(self, client, session): + self._create_dag_and_runs(session) + exp_payload = { + "dags": [ + { + "dag_id": DAG1_ID, + "stats": [ + { + "state": DagRunState.QUEUED, + "count": 0, + }, + { + "state": DagRunState.RUNNING, + "count": 1, + }, + { + "state": DagRunState.SUCCESS, + "count": 0, + }, + { + "state": DagRunState.FAILED, + "count": 1, + }, + ], + }, + { + "dag_id": DAG2_ID, + "stats": [ + { + "state": DagRunState.QUEUED, + "count": 1, + }, + { + "state": DagRunState.RUNNING, + "count": 0, + }, + { + "state": DagRunState.SUCCESS, + "count": 0, + }, + { + "state": DagRunState.FAILED, + "count": 0, + }, + ], + }, + { + "dag_id": DAG3_ID, + "stats": [ + { + "state": DagRunState.QUEUED, + "count": 0, + }, + { + "state": DagRunState.RUNNING, + "count": 0, + }, + { + "state": DagRunState.SUCCESS, + "count": 1, + }, + { + "state": DagRunState.FAILED, + "count": 0, + }, + ], + }, + ], + "total_entries": 3, + } + + response = client().get(API_PREFIX) + assert response.status_code == 200 + res_json = response.json() + assert res_json["total_entries"] == len(res_json["dags"]) + assert res_json == exp_payload + + @pytest.mark.parametrize( + "url, params, exp_payload", + [ + ( + API_PREFIX, + [ + ("dag_ids", DAG1_ID), + ("dag_ids", DAG3_ID), + ("dag_ids", DAG2_ID), + ], + { + "dags": [ + { + "dag_id": DAG1_ID, + "stats": [ + { + "state": DagRunState.QUEUED, + "count": 0, + }, + { + "state": DagRunState.RUNNING, + "count": 1, + }, + { + "state": DagRunState.SUCCESS, + "count": 0, + }, + { + "state": DagRunState.FAILED, + "count": 1, + }, + ], + }, + { + "dag_id": DAG2_ID, + "stats": [ + { + "state": DagRunState.QUEUED, + "count": 1, + }, + { + "state": DagRunState.RUNNING, + "count": 0, + }, + { + "state": DagRunState.SUCCESS, + "count": 0, + }, + { + "state": DagRunState.FAILED, + "count": 0, + }, + ], + }, + { + "dag_id": DAG3_ID, + "stats": [ + { + "state": DagRunState.QUEUED, + "count": 0, + }, + { + "state": DagRunState.RUNNING, + "count": 0, + }, + { + "state": DagRunState.SUCCESS, + "count": 1, + }, + { + "state": DagRunState.FAILED, + "count": 0, + }, + ], + }, + ], + "total_entries": 3, + }, + ), + ( + API_PREFIX, + [("dag_ids", DAG1_ID)], + { + "dags": [ + { + "dag_id": DAG1_ID, + "stats": [ + { + "state": DagRunState.QUEUED, + "count": 0, + }, + { + "state": DagRunState.RUNNING, + "count": 1, + }, + { + "state": DagRunState.SUCCESS, + "count": 0, + }, + { + "state": DagRunState.FAILED, + "count": 1, + }, + ], + } + ], + "total_entries": 1, + }, + ), + ( + API_PREFIX, + [("dag_ids", DAG3_ID)], + { + "dags": [ + { + "dag_id": DAG3_ID, + "stats": [ + { + "state": DagRunState.QUEUED, + "count": 0, + }, + { + "state": DagRunState.RUNNING, + "count": 0, + }, + { + "state": DagRunState.SUCCESS, + "count": 1, + }, + { + "state": DagRunState.FAILED, + "count": 0, + }, + ], + }, + ], + "total_entries": 1, + }, + ), + ], + ) + def test_single_dag_in_dag_ids(self, client, session, url, params, exp_payload): + self._create_dag_and_runs(session) + response = client().get(url, params=params) + assert response.status_code == 
200 + res_json = response.json() + assert res_json["total_entries"] == len(res_json["dags"]) + assert res_json == exp_payload From f2dd3502887e9f085e52de0e38692c1c173b3557 Mon Sep 17 00:00:00 2001 From: Jim Baldwin <14864200+jimwbaldwin@users.noreply.github.com> Date: Wed, 6 Nov 2024 03:01:17 +1100 Subject: [PATCH 045/137] Resolve `GlueJobTrigger` serialization bug causing verbose to always be True (#43622) --- .../providers/amazon/aws/triggers/glue.py | 2 +- .../tests/amazon/aws/operators/test_glue.py | 25 +++++++++++++++++++ .../tests/amazon/aws/triggers/test_glue.py | 18 +++++++++++++ 3 files changed, 44 insertions(+), 1 deletion(-) diff --git a/providers/src/airflow/providers/amazon/aws/triggers/glue.py b/providers/src/airflow/providers/amazon/aws/triggers/glue.py index 3c966f60323b..c4b0c38c66df 100644 --- a/providers/src/airflow/providers/amazon/aws/triggers/glue.py +++ b/providers/src/airflow/providers/amazon/aws/triggers/glue.py @@ -62,7 +62,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]: { "job_name": self.job_name, "run_id": self.run_id, - "verbose": str(self.verbose), + "verbose": self.verbose, "aws_conn_id": self.aws_conn_id, "job_poll_interval": self.job_poll_interval, }, diff --git a/providers/tests/amazon/aws/operators/test_glue.py b/providers/tests/amazon/aws/operators/test_glue.py index 8243940e695f..d01e80788462 100644 --- a/providers/tests/amazon/aws/operators/test_glue.py +++ b/providers/tests/amazon/aws/operators/test_glue.py @@ -182,6 +182,31 @@ def test_execute_with_verbose_logging( ) assert glue.job_name == JOB_NAME + @mock.patch.object(GlueJobHook, "print_job_logs") + @mock.patch.object(GlueJobHook, "get_job_state") + @mock.patch.object(GlueJobHook, "initialize_job") + @mock.patch.object(GlueJobHook, "get_conn") + @mock.patch.object(S3Hook, "load_file") + def test_execute_without_verbose_logging( + self, mock_load_file, mock_get_conn, mock_initialize_job, mock_get_job_state, mock_print_job_logs + ): + glue = GlueJobOperator( + task_id=TASK_ID, + job_name=JOB_NAME, + script_location="s3_uri", + s3_bucket="bucket_name", + iam_role_name="role_arn", + verbose=False, + ) + mock_initialize_job.return_value = {"JobRunState": "RUNNING", "JobRunId": JOB_RUN_ID} + mock_get_job_state.return_value = "SUCCEEDED" + + glue.execute(mock.MagicMock()) + + mock_initialize_job.assert_called_once_with({}, {}) + mock_print_job_logs.assert_not_called() + assert glue.job_name == JOB_NAME + @mock.patch.object(GlueJobHook, "print_job_logs") @mock.patch.object(GlueJobHook, "job_completion") @mock.patch.object(GlueJobHook, "initialize_job") diff --git a/providers/tests/amazon/aws/triggers/test_glue.py b/providers/tests/amazon/aws/triggers/test_glue.py index e39e38d8b760..2f4830bb8689 100644 --- a/providers/tests/amazon/aws/triggers/test_glue.py +++ b/providers/tests/amazon/aws/triggers/test_glue.py @@ -82,6 +82,24 @@ async def test_wait_job_failed(self, get_state_mock: mock.MagicMock): assert get_state_mock.call_count == 3 + def test_serialization(self): + trigger = GlueJobCompleteTrigger( + job_name="job_name", + run_id="JobRunId", + verbose=False, + aws_conn_id="aws_conn_id", + job_poll_interval=0.1, + ) + classpath, kwargs = trigger.serialize() + assert classpath == "airflow.providers.amazon.aws.triggers.glue.GlueJobCompleteTrigger" + assert kwargs == { + "job_name": "job_name", + "run_id": "JobRunId", + "verbose": False, + "aws_conn_id": "aws_conn_id", + "job_poll_interval": 0.1, + } + class TestGlueCatalogPartitionSensorTrigger: @pytest.mark.asyncio From 
d536ec4bd1da958d2f2e5822a6fec647baa12ba9 Mon Sep 17 00:00:00 2001 From: Balthazar Rouberol Date: Tue, 5 Nov 2024 17:07:02 +0100 Subject: [PATCH 046/137] fab_auth_manager: allow get_user method to return the user authenticated via Kerberos (#43662) --- .../fab/auth_manager/fab_auth_manager.py | 15 +++++++++-- .../fab/auth_manager/test_fab_auth_manager.py | 26 ++++++++++++++++--- 2 files changed, 36 insertions(+), 5 deletions(-) diff --git a/providers/src/airflow/providers/fab/auth_manager/fab_auth_manager.py b/providers/src/airflow/providers/fab/auth_manager/fab_auth_manager.py index 8a8fad678869..e93e440f5ddf 100644 --- a/providers/src/airflow/providers/fab/auth_manager/fab_auth_manager.py +++ b/providers/src/airflow/providers/fab/auth_manager/fab_auth_manager.py @@ -25,7 +25,7 @@ import packaging.version from connexion import FlaskApi -from flask import Blueprint, url_for +from flask import Blueprint, g, url_for from packaging.version import Version from sqlalchemy import select from sqlalchemy.orm import Session, joinedload @@ -183,9 +183,20 @@ def get_user_display_name(self) -> str: return f"{first_name} {last_name}".strip() def get_user(self) -> User: - """Return the user associated to the user in session.""" + """ + Return the user associated to the user in session. + + Attempt to find the current user in g.user, as defined by the kerberos authentication backend. + If no such user is found, return the `current_user` local proxy object, linked to the user session. + + """ from flask_login import current_user + # If a user has gone through the Kerberos dance, the kerberos authentication manager + # has linked it with a User model, stored in g.user, and not the session. + if current_user.is_anonymous and getattr(g, "user", None) is not None and not g.user.is_anonymous: + return g.user + return current_user def init(self) -> None: diff --git a/providers/tests/fab/auth_manager/test_fab_auth_manager.py b/providers/tests/fab/auth_manager/test_fab_auth_manager.py index 91efb8428c65..d298f7667eaa 100644 --- a/providers/tests/fab/auth_manager/test_fab_auth_manager.py +++ b/providers/tests/fab/auth_manager/test_fab_auth_manager.py @@ -16,13 +16,14 @@ # under the License. 
from __future__ import annotations +from contextlib import contextmanager from itertools import chain from typing import TYPE_CHECKING from unittest import mock from unittest.mock import Mock import pytest -from flask import Flask +from flask import Flask, g from airflow.exceptions import AirflowConfigException, AirflowException @@ -72,6 +73,13 @@ } +@contextmanager +def user_set(app, user): + g.user = user + yield + g.user = None + + @pytest.fixture def auth_manager(): return FabAuthManager(None) @@ -114,12 +122,24 @@ def test_get_user_display_name( assert auth_manager.get_user_display_name() == expected @mock.patch("flask_login.utils._get_user") - def test_get_user(self, mock_current_user, auth_manager): + def test_get_user(self, mock_current_user, minimal_app_for_auth_api, auth_manager): user = Mock() user.is_anonymous.return_value = True mock_current_user.return_value = user + with minimal_app_for_auth_api.app_context(): + assert auth_manager.get_user() == user - assert auth_manager.get_user() == user + @mock.patch("flask_login.utils._get_user") + def test_get_user_from_flask_g(self, mock_current_user, minimal_app_for_auth_api, auth_manager): + session_user = Mock() + session_user.is_anonymous = True + mock_current_user.return_value = session_user + + flask_g_user = Mock() + flask_g_user.is_anonymous = False + with minimal_app_for_auth_api.app_context(): + with user_set(minimal_app_for_auth_api, flask_g_user): + assert auth_manager.get_user() == flask_g_user @pytest.mark.db_test @mock.patch.object(FabAuthManager, "get_user") From 74fc28020a7e50acb011129bbb84ce457cf80abd Mon Sep 17 00:00:00 2001 From: Elad Kalif <45845474+eladkal@users.noreply.github.com> Date: Tue, 5 Nov 2024 18:07:58 +0200 Subject: [PATCH 047/137] GlueJobOperator: add option to wait for cleanup before returning job status (#43688) --- .../src/airflow/providers/amazon/aws/hooks/glue.py | 6 +++++- .../airflow/providers/amazon/aws/operators/glue.py | 11 ++++++++++- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/providers/src/airflow/providers/amazon/aws/hooks/glue.py b/providers/src/airflow/providers/amazon/aws/hooks/glue.py index 08a96836daf7..80c05feca92c 100644 --- a/providers/src/airflow/providers/amazon/aws/hooks/glue.py +++ b/providers/src/airflow/providers/amazon/aws/hooks/glue.py @@ -282,13 +282,16 @@ def display_logs_from(log_group: str, continuation_token: str | None) -> str | N log_group_error, continuation_tokens.error_stream_continuation ) - def job_completion(self, job_name: str, run_id: str, verbose: bool = False) -> dict[str, str]: + def job_completion( + self, job_name: str, run_id: str, verbose: bool = False, sleep_before_return: int = 0 + ) -> dict[str, str]: """ Wait until Glue job with job_name finishes; return final state if finished or raises AirflowException. :param job_name: unique job name per AWS account :param run_id: The job-run ID of the predecessor job run :param verbose: If True, more Glue Job Run logs show in the Airflow Task Logs. (default: False) + :param sleep_before_return: time in seconds to wait before returning final status. 
         :return: Dict of JobRunState and JobRunId
         """
         next_log_tokens = self.LogContinuationTokens()
         while True:
             job_run_state = self.get_job_state(job_name, run_id)
             ret = self._handle_state(job_run_state, job_name, run_id, verbose, next_log_tokens)
             if ret:
+                time.sleep(sleep_before_return)
                 return ret
             else:
                 time.sleep(self.job_poll_interval)
diff --git a/providers/src/airflow/providers/amazon/aws/operators/glue.py b/providers/src/airflow/providers/amazon/aws/operators/glue.py
index 6673e4b29f7c..563d619cd994 100644
--- a/providers/src/airflow/providers/amazon/aws/operators/glue.py
+++ b/providers/src/airflow/providers/amazon/aws/operators/glue.py
@@ -74,6 +74,11 @@ class GlueJobOperator(BaseOperator):
     :param update_config: If True, Operator will update job configuration. (default: False)
     :param replace_script_file: If True, the script file will be replaced in S3. (default: False)
     :param stop_job_run_on_kill: If True, Operator will stop the job run when task is killed.
+    :param sleep_before_return: time in seconds to wait before returning final status. This is useful when
+        limiting concurrency: Glue needs 5-10 seconds to clean up resources, so returning the status immediately
+        may result in more than one concurrent run. It is recommended to set this parameter to 10 when you are
+        using concurrency=1.
+        For more information see: https://repost.aws/questions/QUaKgpLBMPSGWO0iq2Fob_bw/glue-run-concurrent-jobs#ANFpCL2fRnQRqgDFuIU_rpvA
     """
 
     template_fields: Sequence[str] = (
@@ -118,6 +123,7 @@ def __init__(
         update_config: bool = False,
         job_poll_interval: int | float = 6,
         stop_job_run_on_kill: bool = False,
+        sleep_before_return: int = 0,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -145,6 +151,7 @@ def __init__(
         self.job_poll_interval = job_poll_interval
         self.stop_job_run_on_kill = stop_job_run_on_kill
         self._job_run_id: str | None = None
+        self.sleep_before_return: int = sleep_before_return
 
     @cached_property
     def glue_job_hook(self) -> GlueJobHook:
@@ -220,7 +227,9 @@ def execute(self, context: Context):
                 method_name="execute_complete",
             )
         elif self.wait_for_completion:
-            glue_job_run = self.glue_job_hook.job_completion(self.job_name, self._job_run_id, self.verbose)
+            glue_job_run = self.glue_job_hook.job_completion(
+                self.job_name, self._job_run_id, self.verbose, self.sleep_before_return
+            )
             self.log.info(
                 "AWS Glue Job: %s status: %s. Run Id: %s",
                 self.job_name,

From 6a5b22df0e2f399afc8e952165fd005b81101e08 Mon Sep 17 00:00:00 2001
From: SAI GANESH S
Date: Tue, 5 Nov 2024 22:19:36 +0530
Subject: [PATCH 048/137] Replaced bootstrap-typeahead with jquery-ui (#40744)

* Replaced Bootstrap-typeahead with jquery-ui for the typeahead used to
search other timezones from the navbar.
* Replace bootstrap-typeahead with jquery-ui in dags.js * Removed bootstrap3-typeahead package * Added custom css for typeahead * Fixed linting issues in dags.js * fixed linting issue in main.css --- airflow/www/package.json | 2 +- airflow/www/static/css/main.css | 18 +++++ airflow/www/static/js/dags.js | 79 ++++++++++++++------- airflow/www/static/js/main.js | 91 +++++++++++++++++-------- airflow/www/templates/airflow/main.html | 4 +- airflow/www/webpack.config.js | 9 ++- airflow/www/yarn.lock | 19 ++++-- 7 files changed, 159 insertions(+), 63 deletions(-) diff --git a/airflow/www/package.json b/airflow/www/package.json index 79991b3bb9af..638e7821396a 100644 --- a/airflow/www/package.json +++ b/airflow/www/package.json @@ -109,7 +109,6 @@ "@visx/shape": "^2.12.2", "ansi_up": "^6.0.2", "axios": "^1.7.4", - "bootstrap-3-typeahead": "^4.0.2", "camelcase-keys": "^7.0.0", "chakra-react-select": "^4.0.0", "codemirror": "^5.59.1", @@ -124,6 +123,7 @@ "eonasdan-bootstrap-datetimepicker": "^4.17.47", "framer-motion": "^6.0.0", "jquery": ">=3.5.0", + "jquery-ui": "1.13.3", "jshint": "^2.13.4", "json-to-pretty-yaml": "^1.2.2", "lodash": "^4.17.21", diff --git a/airflow/www/static/css/main.css b/airflow/www/static/css/main.css index 0a2810c52e6f..9d9ed1e9fcf9 100644 --- a/airflow/www/static/css/main.css +++ b/airflow/www/static/css/main.css @@ -288,10 +288,28 @@ label[for="timezone-other"], font-weight: normal; } +#timezone-menu { + overflow-x: hidden; +} + #timezone-menu ul.typeahead.dropdown-menu { max-height: 200px; overflow-y: auto; overflow-x: hidden; + border: none; + left: 0 !important; +} + +#timezone-menu .dropdown-item { + padding: 3px 20px; +} + +.ui-menu .ui-menu-item a.ui-state-focus, +.ui-menu .ui-menu-item a.ui-state-active { + border: 1px solid white; + border-left: 1px solid #ccc; + background: #f6f6f6; + color: #454545; } /* depending on the version of FAB in use, we may have a style conflict */ diff --git a/airflow/www/static/js/dags.js b/airflow/www/static/js/dags.js index 9228ca630b51..66ea1244879c 100644 --- a/airflow/www/static/js/dags.js +++ b/airflow/www/static/js/dags.js @@ -116,31 +116,60 @@ $.each($("[id^=toggle]"), function toggleId() { }); }); -$(".typeahead").typeahead({ - source(query, callback) { - return $.ajax(autocompleteUrl, { - data: { - query: encodeURIComponent(query), - status: statusFilter, - }, - success: callback, - }); - }, - displayText(value) { - return value.dag_display_name || value.name; - }, - autoSelect: false, - afterSelect(value) { - const query = new URLSearchParams(window.location.search); - query.set("search", value.name); - if (value.type === "owner") { - window.location = `${DAGS_INDEX}?${query}`; - } - if (value.type === "dag") { - window.location = `${gridUrl.replace("__DAG_ID__", value.name)}?${query}`; - } - }, -}); +// eslint-disable-next-line no-underscore-dangle +$(".typeahead") + .autocomplete({ + autoFocus: true, + source: (request, response) => { + $.ajax({ + url: autocompleteUrl, + data: { + query: encodeURIComponent(request.term), + status: statusFilter, + }, + success: (data) => { + response(data); + }, + }); + }, + focus: (event) => { + // Prevents value from being inserted on focus + event.preventDefault(); + }, + select: (_, ui) => { + const value = ui.item; + const query = new URLSearchParams(window.location.search); + query.set("search", value.name); + if (value.type === "owner") { + window.location = `${DAGS_INDEX}?${query}`; + } + if (value.type === "dag") { + window.location = `${gridUrl.replace( + "__DAG_ID__", + 
+          value.name
+        )}?${query}`;
+      }
+    },
+    appendTo: "#search_form > div",
+  })
+  .data("ui-autocomplete")._renderMenu = function (ul, items) {
+  ul.addClass("typeahead dropdown-menu");
+  $.each(items, function (_, item) {
+    // eslint-disable-next-line no-underscore-dangle
+    this._renderItemData(ul, item);
+  });
+};
+
+// eslint-disable-next-line no-underscore-dangle
+$.ui.autocomplete.prototype._renderItem = function (ul, item) {
+  return $("<li>")
+    .append(
+      $("<a>")
+        .addClass("dropdown-item")
+        .text(item.dag_display_name || item.name)
+    )
+    .appendTo(ul);
+};
 
 $("#search_form").on("reset", () => {
   const query = new URLSearchParams(window.location.search);
diff --git a/airflow/www/static/js/main.js b/airflow/www/static/js/main.js
index 95861ea6c13a..c60827c7a362 100644
--- a/airflow/www/static/js/main.js
+++ b/airflow/www/static/js/main.js
@@ -180,35 +180,70 @@ function initializeUITimezone() {
   $("a[data-timezone]").click((evt) => {
     changeDisplayedTimezone($(evt.currentTarget).data("timezone"));
   });
-
-  $("#timezone-other").typeahead({
-    source: $(
-      moment.tz.names().map((tzName) => {
-        const category = tzName.split("/", 1)[0];
-        return { category, name: tzName.replace("_", " "), tzName };
-      })
-    ),
-    showHintOnFocus: "all",
-    showCategoryHeader: true,
-    items: "all",
-    afterSelect(data) {
-      // Clear it for next time we open the pop-up
-      this.$element.val("");
-
-      setManualTimezone(data.tzName);
-      changeDisplayedTimezone(data.tzName);
-
-      // We need to delay the close event to not be in the form handler,
-      // otherwise bootstrap ignores it, thinking it's caused by interaction on
-      // the <form> element
-      setTimeout(() => {
-        document.activeElement.blur();
-        // Bug in typeahed, it thinks it's still shown!
-        this.shown = false;
-        this.focused = false;
-      }, 1);
-    },
+  // Prepare the data source
+  const timezoneData = moment.tz.names().map((tzName) => {
+    const category = tzName.split("/", 1)[0];
+    return { category, label: tzName.replace("_", " "), value: tzName };
   });
+
+  // Create a custom filter function to include categories
+  function filterByCategory(array, term) {
+    const matcher = new RegExp($.ui.autocomplete.escapeRegex(term), "i");
+    return $.grep(
+      array,
+      (item) => matcher.test(item.label) || matcher.test(item.category)
+    );
+  }
+
+  // Initialize jQuery UI Autocomplete
+  // eslint-disable-next-line no-underscore-dangle
+  $("#timezone-other")
+    .autocomplete({
+      source: (request, response) => {
+        const results = filterByCategory(timezoneData, request.term);
+        response(results);
+      },
+      appendTo: "#timezone-menu > li:nth-child(6) > form",
+      focus: (event, ui) => {
+        // Prevent the value from being inserted on focus
+        event.preventDefault();
+        $(this).val(ui.item.label);
+      },
+      select: (event, ui) => {
+        // Clear it for next time we open the pop-up
+        $(this).val("");
+
+        setManualTimezone(ui.item.value);
+        changeDisplayedTimezone(ui.item.value);
+
+        return false;
+      },
+    })
+    .data("ui-autocomplete")._renderItem = function (ul, item) {
+    const $li = $("<li>");
+    $li.append(
+      `<a class="dropdown-item" href="#">${item.label}</a>`
+    );
+    return $li.appendTo(ul);
+  };
+
+  // Custom rendering function to include category headers
+  // eslint-disable-next-line no-underscore-dangle
+  $.ui.autocomplete.prototype._renderMenu = function (ul, items) {
+    let currentCategory = "";
+    ul.addClass("typeahead dropdown-menu");
+    ul.attr("role", "listbox");
+    $.each(items, (index, item) => {
+      if (item.category !== currentCategory) {
+        ul.append(
+          `<li class="dropdown-header">${item.category}</li>`
+        );
+        currentCategory = item.category;
+      }
+      // eslint-disable-next-line no-underscore-dangle
+      this._renderItemData(ul, item);
+    });
+  };
 }
 
 function filterOpSelected(ele) {
diff --git a/airflow/www/templates/airflow/main.html b/airflow/www/templates/airflow/main.html
index 008418d7e5b7..3842d3602dd7 100644
--- a/airflow/www/templates/airflow/main.html
+++ b/airflow/www/templates/airflow/main.html
@@ -46,6 +46,7 @@
 
 
 +