From 8914f988d376039fdcd562df07f7aa275a6935a9 Mon Sep 17 00:00:00 2001 From: adam Date: Fri, 12 Jan 2024 09:29:12 -0700 Subject: [PATCH 1/5] improve make documentation --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 465233f..86532fb 100644 --- a/Makefile +++ b/Makefile @@ -46,7 +46,7 @@ test: $(SENTINELS)/dev-setup docker: requirements/main.txt $(DOCKER) build --cache-from "$(DOCKER_CACHE_FROM)" -t $(DOCKER_HOST)/$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME):$(DOCKER_IMAGE_TAG) . -## Tag and push a release (disabled) +## Tag and push a release (disabled; done via GitHub Actions now) release: @echo "Package '$(PACKAGE_NAME)' releases are managed via GitHub" From 74c37385fa0a308ab53ca24acdc9d17b06466b20 Mon Sep 17 00:00:00 2001 From: adam Date: Fri, 12 Jan 2024 15:25:16 -0700 Subject: [PATCH 2/5] Add GitHub Actions --- .github/dependabot.yml | 11 +++ .github/workflows/ci.yaml | 117 ++++++++++++++++++++++++++++++++ .github/workflows/periodic.yaml | 45 ++++++++++++ 3 files changed, 173 insertions(+) create mode 100644 .github/dependabot.yml create mode 100644 .github/workflows/ci.yaml create mode 100644 .github/workflows/periodic.yaml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..9bd0170 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,11 @@ +version: 2 +updates: + - package-ecosystem: "docker" + directory: "/" + schedule: + interval: "weekly" + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 0000000..7841b88 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,117 @@ +name: CI + +"on": + push: + branches-ignore: + # These should always correspond to pull requests, so ignore them for + # the push trigger and let them be triggered by the pull_request + # trigger, avoiding running the workflow twice. This is a minor + # optimization so there's no need to ensure this is comprehensive. + - "dependabot/**" + - "renovate/**" + - "tickets/**" + - "u/**" + tags: + - "*" + pull_request: {} + +jobs: + lint: + runs-on: ubuntu-latest + timeout-minutes: 5 + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Run pre-commit + uses: pre-commit/action@v3.0.0 + + test: + runs-on: ubuntu-latest + timeout-minutes: 10 + + strategy: + matrix: + python: + - "3.11" + + steps: + - uses: actions/checkout@v4 + + - name: Run tox + uses: lsst-sqre/run-tox@v1 + with: + python-version: ${{ matrix.python }} + tox-envs: "py,coverage-report,typing" + + build: + runs-on: ubuntu-latest + needs: [lint, test] + timeout-minutes: 10 + + # Only do Docker builds of tagged releases and pull requests from ticket + # branches. This will still trigger on pull requests from untrusted + # repositories whose branch names match our tickets/* branch convention, + # but in this case the build will fail with an error since the secret + # won't be set. 
+    if: >
+      startsWith(github.ref, 'refs/tags/')
+      || startsWith(github.head_ref, 'tickets/')
+
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - uses: lsst-sqre/build-and-push-to-ghcr@v1
+        id: build
+        with:
+          image: ${{ github.repository }}
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+
+  test-packaging:
+
+    name: Test packaging
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0 # full history for setuptools_scm
+
+      - name: Build and publish
+        uses: lsst-sqre/build-and-publish-to-pypi@v2
+        with:
+          python-version: "3.11"
+          upload: false
+
+  pypi:
+
+    # This job requires setup:
+    # 1. Set up a trusted publisher for PyPI
+    # 2. Set up a "pypi" environment in the repository
+    # See https://github.com/lsst-sqre/build-and-publish-to-pypi
+    name: Upload release to PyPI
+    runs-on: ubuntu-latest
+    needs: [lint, test, test-packaging]
+    environment:
+      name: pypi
+      url: https://pypi.org/p/giftless
+    permissions:
+      id-token: write
+    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
+
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0 # full history for setuptools_scm
+
+      - name: Build and publish
+        uses: lsst-sqre/build-and-publish-to-pypi@v2
+        with:
+          python-version: "3.11"
diff --git a/.github/workflows/periodic.yaml b/.github/workflows/periodic.yaml
new file mode 100644
index 0000000..ab088b9
--- /dev/null
+++ b/.github/workflows/periodic.yaml
@@ -0,0 +1,45 @@
+# This is a separate run of the Python test suite that doesn't cache the tox
+# environment and runs from a schedule. The purpose is to test whether
+# updating pinned dependencies would cause any tests to fail.
+
+name: Periodic CI
+
+"on":
+  schedule:
+    - cron: "0 12 * * 1"
+  workflow_dispatch: {}
+
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    timeout-minutes: 10
+
+    strategy:
+      matrix:
+        python:
+          - "3.11"
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Run neophile
+        uses: lsst-sqre/run-neophile@v1
+        with:
+          python-version: ${{ matrix.python }}
+          mode: update
+
+      - name: Run tox
+        uses: lsst-sqre/run-tox@v1
+        with:
+          python-version: ${{ matrix.python }}
+          tox-envs: "lint,typing,py"
+
+      - name: Report status
+        if: always()
+        uses: ravsamhq/notify-slack-action@v2
+        with:
+          status: ${{ job.status }}
+          notify_when: "failure"
+          notification_title: "Periodic test for {repo} failed"
+        env:
+          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_ALERT_WEBHOOK }}

From 202f757257656fdad92d30a625a48d25c9872bff Mon Sep 17 00:00:00 2001
From: adam
Date: Mon, 15 Jan 2024 08:32:33 -0700
Subject: [PATCH 3/5] tweak makefile and requirements

---
 Makefile              | 15 ++++++-----
 requirements/dev.in   |  2 ++
 requirements/dev.txt  | 61 +++++++++++++++++++++++++++++++++----------
 requirements/main.txt |  6 ++---
 4 files changed, 60 insertions(+), 24 deletions(-)

diff --git a/Makefile b/Makefile
index 86532fb..86574e2 100644
--- a/Makefile
+++ b/Makefile
@@ -28,18 +28,18 @@ VERSION := $(shell $(PYTHON) -c "from importlib.metadata import version;print(ve
 
 default: help
 
-## Install packages necessary for make to work
-init:
-	pip install --upgrade pip pre-commit pip-tools tox
-
 ## Regenerate requirements files
 requirements: requirements/dev.txt requirements/dev.in requirements/main.txt requirements/main.in
 
 ## Set up the development environment
 dev-setup: $(SENTINELS)/dev-setup
 
+## Run all linting checks
+lint: $(SENTINELS)
+	pre-commit run --all-files
+
 ## Run all tests
-test: $(SENTINELS)/dev-setup
+test: $(SENTINELS)
 	$(PYTEST) $(PYTEST_EXTRA_ARGS) $(PACKAGE_DIRS) $(TESTS_DIR)
 
 ## Build a local 
Docker image
@@ -77,13 +77,14 @@ $(SENTINELS):
 	mkdir $@
 
 $(SENTINELS)/dist-setup: | $(SENTINELS)
-	$(PIP) install -U pip wheel twine pre-commit
+	$(PIP) install -U wheel twine
 	@touch $@
 
 $(SENTINELS)/dist: $(SENTINELS)/dist-setup $(DIST_DIR)/$(PACKAGE_NAME)-$(VERSION).tar.gz $(DIST_DIR)/$(PACKAGE_NAME)-$(VERSION)-py3-none-any.whl | $(SENTINELS)
 	@touch $@
 
 $(SENTINELS)/dev-setup: requirements/main.txt requirements/dev.txt | $(SENTINELS)
+	$(PIP) install -U pip pip-tools pre-commit tox
 	$(PIP) install -r requirements/main.txt
 	$(PIP) install -e .
 	$(PIP) install -r requirements/dev.txt
diff --git a/requirements/dev.in b/requirements/dev.in
index f8305f0..d1239a5 100644
--- a/requirements/dev.in
+++ b/requirements/dev.in
@@ -10,6 +10,8 @@ pytest-env
 pytest-cov
 pytest-vcr
 
+cloud-storage-mocker
+
 pytz
 types-pytz
 types-jwt
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 0768048..ae10b02 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -12,9 +12,9 @@ babel==2.14.0
     # via sphinx
 beautifulsoup4==4.12.2
     # via furo
-boto3-stubs==1.34.16
+boto3-stubs==1.34.19
     # via -r requirements/dev.in
-botocore-stubs==1.34.16
+botocore-stubs==1.34.19
     # via
     #   -r requirements/dev.in
     #   boto3-stubs
@@ -39,6 +39,8 @@ click==8.1.7
     # via
     #   -c requirements/main.txt
     #   pip-tools
+cloud-storage-mocker==0.3.1
+    # via -r requirements/dev.in
 colorama==0.4.6
     # via tox
 commonmark==0.9.1
@@ -62,12 +64,41 @@ flake8==7.0.0
     # via -r requirements/dev.in
 furo==2023.9.10
     # via -r requirements/dev.in
-google-auth==2.26.1
+google-api-core==2.15.0
+    # via
+    #   -c requirements/main.txt
+    #   google-cloud-core
+    #   google-cloud-storage
+google-auth==2.26.2
     # via
     #   -c requirements/main.txt
+    #   google-api-core
     #   google-auth-stubs
+    #   google-cloud-core
+    #   google-cloud-storage
 google-auth-stubs==0.2.0
     # via -r requirements/dev.in
+google-cloud-core==2.4.1
+    # via
+    #   -c requirements/main.txt
+    #   google-cloud-storage
+google-cloud-storage==2.14.0
+    # via
+    #   -c requirements/main.txt
+    #   cloud-storage-mocker
+google-crc32c==1.5.0
+    # via
+    #   -c requirements/main.txt
+    #   google-cloud-storage
+    #   google-resumable-media
+google-resumable-media==2.7.0
+    # via
+    #   -c requirements/main.txt
+    #   google-cloud-storage
+googleapis-common-protos==1.62.0
+    # via
+    #   -c requirements/main.txt
+    #   google-api-core
 grpc-stubs==1.53.0.5
     # via google-auth-stubs
 grpcio==1.60.0
@@ -117,6 +148,11 @@ pluggy==1.3.0
     # via
     #   pytest
     #   tox
+protobuf==4.25.2
+    # via
+    #   -c requirements/main.txt
+    #   google-api-core
+    #   googleapis-common-protos
 pyasn1==0.5.1
     # via
     #   -c requirements/main.txt
@@ -167,6 +203,8 @@ recommonmark==0.7.1
 requests==2.31.0
     # via
     #   -c requirements/main.txt
+    #   google-api-core
+    #   google-cloud-storage
     #   sphinx
 rsa==4.9
     # via
@@ -183,28 +221,23 @@ sphinx==7.2.6
     #   recommonmark
     #   sphinx-autodoc-typehints
     #   sphinx-basic-ng
-    #   sphinxcontrib-applehelp
-    #   sphinxcontrib-devhelp
-    #   sphinxcontrib-htmlhelp
-    #   sphinxcontrib-qthelp
-    #   sphinxcontrib-serializinghtml
 sphinx-autodoc-typehints==1.25.2
     # via -r requirements/dev.in
 sphinx-basic-ng==1.0.0b2
     # via furo
-sphinxcontrib-applehelp==1.0.7
+sphinxcontrib-applehelp==1.0.8
     # via sphinx
-sphinxcontrib-devhelp==1.0.5
+sphinxcontrib-devhelp==1.0.6
     # via sphinx
-sphinxcontrib-htmlhelp==2.0.4
+sphinxcontrib-htmlhelp==2.0.5
     # via sphinx
 sphinxcontrib-jsmath==1.0.1
     # via sphinx
-sphinxcontrib-qthelp==1.0.6
+sphinxcontrib-qthelp==1.0.7
     # via sphinx
-sphinxcontrib-serializinghtml==1.1.9
+sphinxcontrib-serializinghtml==1.1.10 # via sphinx -tox==4.11.4 +tox==4.12.0 # via -r requirements/dev.in types-awscrt==0.20.0 # via botocore-stubs diff --git a/requirements/main.txt b/requirements/main.txt index f6e12e5..8b5d0b8 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -12,9 +12,9 @@ azure-storage-blob==12.19.0 # via -r requirements/main.in blinker==1.7.0 # via flask -boto3==1.34.16 +boto3==1.34.19 # via -r requirements/main.in -botocore==1.34.16 +botocore==1.34.19 # via # boto3 # s3transfer @@ -47,7 +47,7 @@ google-api-core==2.15.0 # via # google-cloud-core # google-cloud-storage -google-auth==2.26.1 +google-auth==2.26.2 # via # google-api-core # google-cloud-core From a6b8233edc6307554117b0043d6a10b047043eb1 Mon Sep 17 00:00:00 2001 From: adam Date: Mon, 15 Jan 2024 09:42:18 -0700 Subject: [PATCH 4/5] Slightly refactor tests --- tests/storage/__init__.py | 30 +++++++----------------------- tests/storage/test_amazon_s3.py | 26 ++++++++++++-------------- 2 files changed, 19 insertions(+), 37 deletions(-) diff --git a/tests/storage/__init__.py b/tests/storage/__init__.py index 08c3b12..d67b2d5 100644 --- a/tests/storage/__init__.py +++ b/tests/storage/__init__.py @@ -1,4 +1,5 @@ import io +from abc import ABC from typing import Any, cast import pytest @@ -14,14 +15,8 @@ # for storage classes here. That should be refactored sometime. -class _CommonStorageAbstractTests: - """Common tests for all storage backend types and interfaces. - - This should not be used directly, because it is inherited by other - AbstractTest test suites. - - Perhaps that means that we should make this an ABC? - """ +class _CommonStorageAbstractTests(ABC): # noqa: B024 + """Common tests for all storage backend types and interfaces.""" def test_get_size(self, storage_backend: StreamingStorage) -> None: """Test getting the size of a stored object.""" @@ -53,14 +48,9 @@ def test_exists_not_exists( assert not storage_backend.exists("org/repo", ARBITRARY_OID) -class _VerifiableStorageAbstractTests: +class _VerifiableStorageAbstractTests(ABC): # noqa: B024 """Mixin class for other base storage adapter test classes that implement VerifiableStorage. - - This should not be used directly, because it is inherited by other - AbstractTest test suites. - - Perhaps that means this should be an ABC? """ def test_verify_object_ok(self, storage_backend: StreamingStorage) -> None: @@ -88,7 +78,7 @@ def test_verify_object_not_there( class StreamingStorageAbstractTests( - _CommonStorageAbstractTests, _VerifiableStorageAbstractTests + _CommonStorageAbstractTests, _VerifiableStorageAbstractTests, ABC ): """Mixin for testing the StreamingStorage methods of a backend that implements StreamingStorage. @@ -96,8 +86,6 @@ class StreamingStorageAbstractTests( To use, create a concrete test class mixing this class in, and define a fixture named ``storage_backend`` that returns an appropriate storage backend object. - - Again, perhaps this should be defined as an ABC? """ def test_put_get_object(self, storage_backend: StreamingStorage) -> None: @@ -135,24 +123,20 @@ class ExternalStorageAbstractTests( Again, perhaps this should be defined as an ABC? 
""" - def test_get_upload_action( - self, storage_backend: ExternalStorage - ) -> dict[str, Any]: + def test_get_upload_action(self, storage_backend: ExternalStorage) -> None: action_spec = storage_backend.get_upload_action( "org/repo", ARBITRARY_OID, 100, 3600 ) upload = cast(dict[str, Any], action_spec["actions"]["upload"]) assert upload["href"][0:4] == "http" assert upload["expires_in"] == 3600 - return upload def test_get_download_action( self, storage_backend: ExternalStorage - ) -> dict[str, Any]: + ) -> None: action_spec = storage_backend.get_download_action( "org/repo", ARBITRARY_OID, 100, 7200 ) download = cast(dict[str, Any], action_spec["actions"]["download"]) assert download["href"][0:4] == "http" assert download["expires_in"] == 7200 - return download diff --git a/tests/storage/test_amazon_s3.py b/tests/storage/test_amazon_s3.py index 78ed13f..edea687 100644 --- a/tests/storage/test_amazon_s3.py +++ b/tests/storage/test_amazon_s3.py @@ -10,12 +10,11 @@ from giftless.storage import ExternalStorage from giftless.storage.amazon_s3 import AmazonS3Storage -from . import ( - ARBITRARY_OID, - ExternalStorageAbstractTests, - StreamingStorageAbstractTests, -) +from . import ExternalStorageAbstractTests, StreamingStorageAbstractTests +ARBITRARY_OID = ( + "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824" +) TEST_AWS_S3_BUCKET_NAME = "test-giftless" @@ -75,16 +74,15 @@ def vcr_config() -> dict[str, Any]: class TestAmazonS3StorageBackend( StreamingStorageAbstractTests, ExternalStorageAbstractTests ): - # This is gross. Because we're extending the classes, it has to return - # the same thing, and upload uses the test method and returns a value - # which is not how tests usually work, and ugh. - def test_get_upload_action( - self, storage_backend: ExternalStorage - ) -> dict[str, Any]: - upload = super().test_get_upload_action(storage_backend) - + def test_get_upload_action(self, storage_backend: ExternalStorage) -> None: + # A little duplication is better than a test that returns a value. 
+        action_spec = storage_backend.get_upload_action(
+            "org/repo", ARBITRARY_OID, 100, 3600
+        )
+        upload = action_spec["actions"]["upload"]
+        assert upload["href"][0:4] == "http"
+        assert upload["expires_in"] == 3600
         assert upload["header"]["Content-Type"] == "application/octet-stream"
         b64_oid = upload["header"]["x-amz-checksum-sha256"]
         assert b64decode(b64_oid) == unhexlify(ARBITRARY_OID)
-        return upload  # yuck

From 812c519439af4d4a33f8b8d29181e90c04a21c4e Mon Sep 17 00:00:00 2001
From: adam
Date: Mon, 15 Jan 2024 12:51:56 -0700
Subject: [PATCH 5/5] Add simple mock for Google Cloud Storage tests

---
 giftless/storage/google_cloud.py    |   4 +-
 tests/mocks/__init__.py             |   0
 tests/mocks/google_cloud_storage.py |  83 +++++++++++++
 tests/storage/test_google_cloud.py  | 175 ++++++++++------------------
 4 files changed, 147 insertions(+), 115 deletions(-)
 create mode 100644 tests/mocks/__init__.py
 create mode 100644 tests/mocks/google_cloud_storage.py

diff --git a/giftless/storage/google_cloud.py b/giftless/storage/google_cloud.py
index ec10198..e58772e 100644
--- a/giftless/storage/google_cloud.py
+++ b/giftless/storage/google_cloud.py
@@ -9,8 +9,8 @@
 from typing import Any, BinaryIO, cast
 
 import google.auth
+import google.cloud.storage
 from google.auth import impersonated_credentials
-from google.cloud import storage
 from google.oauth2 import service_account
 
 from giftless.storage import ExternalStorage, StreamingStorage
@@ -40,7 +40,7 @@ def __init__(
             | impersonated_credentials.Credentials
             | None
         ) = self._load_credentials(account_key_file, account_key_base64)
-        self.storage_client = storage.Client(
+        self.storage_client = google.cloud.storage.Client(
             project=project_name, credentials=self.credentials
         )
         if not self.credentials:
diff --git a/tests/mocks/__init__.py b/tests/mocks/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/mocks/google_cloud_storage.py b/tests/mocks/google_cloud_storage.py
new file mode 100644
index 0000000..e901d44
--- /dev/null
+++ b/tests/mocks/google_cloud_storage.py
@@ -0,0 +1,83 @@
+"""Mock for google_cloud_storage that just uses a temporary directory
+rather than talking to Google. This effectively makes it a LocalStorage
+implementation, of course.
+"""
+
+import shutil
+from pathlib import Path
+from typing import Any, BinaryIO
+
+from giftless.storage.exc import ObjectNotFoundError
+from giftless.storage.google_cloud import GoogleCloudStorage
+
+
+class MockGoogleCloudStorage(GoogleCloudStorage):
+    """Mocks a GoogleCloudStorage object by simulating it with a local
+    directory.
+ """ + + def __init__( + self, + project_name: str, + bucket_name: str, + path: Path, + account_key_file: str | None = None, + account_key_base64: str | None = None, + path_prefix: str | None = None, + serviceaccount_email: str | None = None, + **_: Any, + ) -> None: + super().__init__( + project_name=project_name, + bucket_name=bucket_name, + account_key_file=account_key_file, + account_key_base64=account_key_base64, + serviceaccount_email=serviceaccount_email, + ) + self._path = path + + def _get_blob_path(self, prefix: str, oid: str) -> str: + return str(self._get_blob_pathlib_path(prefix, oid)) + + def _get_blob_pathlib_path(self, prefix: str, oid: str) -> Path: + return Path(self._path / Path(prefix) / oid) + + @staticmethod + def _create_path(spath: str) -> None: + path = Path(spath) + if not path.is_dir(): + path.mkdir(parents=True) + + def _get_signed_url( + self, + prefix: str, + oid: str, + expires_in: int, + http_method: str = "GET", + filename: str | None = None, + disposition: str | None = None, + ) -> str: + return f"https://example.com/signed_blob/{prefix}/{oid}" + + def get(self, prefix: str, oid: str) -> BinaryIO: + obj = self._get_blob_pathlib_path(prefix, oid) + if not obj.exists(): + raise ObjectNotFoundError("Object does not exist") + return obj.open("rb") + + def put(self, prefix: str, oid: str, data_stream: BinaryIO) -> int: + path = self._get_blob_pathlib_path(prefix, oid) + directory = path.parent + self._create_path(str(directory)) + with path.open("bw") as dest: + shutil.copyfileobj(data_stream, dest) + return dest.tell() + + def exists(self, prefix: str, oid: str) -> bool: + return self._get_blob_pathlib_path(prefix, oid).is_file() + + def get_size(self, prefix: str, oid: str) -> int: + if not self.exists(prefix, oid): + raise ObjectNotFoundError("Object does not exist") + path = self._get_blob_pathlib_path(prefix, oid) + return path.stat().st_size diff --git a/tests/storage/test_google_cloud.py b/tests/storage/test_google_cloud.py index 7abad44..bc3bf6e 100644 --- a/tests/storage/test_google_cloud.py +++ b/tests/storage/test_google_cloud.py @@ -1,129 +1,78 @@ """Tests for the Google Cloud Storage storage backend.""" -import os -from collections.abc import Generator -from typing import Any +from pathlib import Path +import google.cloud.storage # noqa: F401 (used implicitly by storage backend) import pytest -from google.api_core.exceptions import GoogleAPIError -from giftless.storage.google_cloud import GoogleCloudStorage +from ..mocks.google_cloud_storage import MockGoogleCloudStorage +from . 
import ExternalStorageAbstractTests, StreamingStorageAbstractTests MOCK_GCP_PROJECT_NAME = "giftless-tests" -MOCK_GCP_BUCKET_NAME = "giftless-tests-20200818" +MOCK_GCP_BUCKET_NAME = "giftless-tests-20240115" # This is a valid but revoked key that we use in testing MOCK_GCP_KEY_B64 = ( - "ewogICJ0eXBlIjogInNlcnZpY2VfYWNjb3VudCIsCiAgInByb2plY3RfaWQiOiAiZ2lmdGxlc3MtdGVz" - "dHMiLAogICJwcml2YXRlX2tleV9pZCI6ICI4MWRhNDcxNzhiYzhmYjE1MDU1NTg3OWRjZTczZThmZDlm" - "OWI4NmJkIiwKICAicHJpdmF0ZV9rZXkiOiAiLS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tXG5NSUlF" - "dkFJQkFEQU5CZ2txaGtpRzl3MEJBUUVGQUFTQ0JLWXdnZ1NpQWdFQUFvSUJBUUNsYXdDOUEvZHBnbVJW" - "XG5kYVg2UW5xY1N6YW5ueTdCVlgwVklwUHVjNzl2aFR2NWRwZXRaa29SQmV6Uzg2ZStHUHVyTmJIMU9r" - "WEZrL2tkXG5SNHFqMDV6SXlYeWxiQUVxSk1BV24zZFY0VUVRVFlmRitPY0ltZUxpcjR3cW9pTldDZDNJ" - "aHErNHVVeU1WRDMxXG5wc1FlcWVxcWV6bVoyNG1oTjBLK2NQczNuSXlIK0lzZXFsWjJob3U3bUU3U2Js" - "YXdjc04ramcyNmQ5YzFUZlpoXG42eFozVkpndGFtcUZvdlZmbEZwNFVvLy9tVGo0cXEwUWRUYk9SS1NE" - "eVkxTWhkQ24veSsyaForVm9IUitFM0Z4XG5XRmc2VGFwRGJhc29heEp5YjRoZEFFK0JhbW14bklTL09G" - "bElaMGVoL2tsRmlBTlJRMEpQb2dXRjFjVE9NcVFxXG4wMlVFV2V5ckFnTUJBQUVDZ2dFQUJNOE5odGVQ" - "NElhTEUxd2haclN0cEp5NWltMGgxenFXTVlCTU85WDR4KzJUXG5PZmRUYStLbWtpcUV1c1UyanNJdks1" - "VUJPakVBcncxVU1RYnBaaEtoTDhub2c3dGkyNjVoMG1Ba1pzWlZOWHU0XG5UKzQ4REZ4YzQ4THlzaktX" - "M1RCQVBSb2RRbkJLTVA3MnY4QThKMU5BYlMwZ3IvTW1TbEVidm1tT2FuTU9ONXAwXG43djlscm9GMzFO" - "akMzT05OY25pUlRYY01xT2tEbWt5LyszeVc2RldMMkJZV3RwcGN3L0s1TnYxdGNMTG5iajVhXG5Hc3dV" - "MENtQXgyTEVoWEo0bndJaWlFR3h6UGZYVXNLcVhLL2JEZENKbDUzMTgraU9aSHNrdXR1OFlqQVpsdktp" - "XG5yckNFUkFXZitLeTZ0WGhnKzJIRzJJbUc5cG8wRnUwTGlIU0ZVUURKY1FLQmdRRFQ5RDJEYm9SNWFG" - "WW0wQlVSXG5vNGd4OHZGc0NyTEx0a09EZGx3U2wrT20yblFvY0JXSTEyTmF5QXRlL2xhVFZNRlorVks1" - "bU9vYXl2WnljTU1YXG5SdXZJYmdCTFdHYkdwSXdXZnlDOGxRZEJYM09xZTZZSzZTMUU2VnNYUVN0aHQ0" - "YUx3ZGpGQ2J6VU1lc1ZzREV5XG5FYU85aXlTUVlFTmFTN2V3amFzNUFVU1F0d0tCZ1FESHl4WUp3bWxp" - "QzE4NEVyZ3lZSEFwYm9weXQzSVkzVGFKXG5yV2MrSGw5WDNzVEJzaFVQYy85SmhjanZKYWVzMlhrcEEw" - "YmY5cis1MEcxUndua3dMWHZsbDJSU0FBNE05TG4rXG45cVlsNEFXNU9QVTVJS0tKYVk1c0kzSHdXTXd6" - "elRya3FBV3hNallJME9OSnBaWUVnSTVKN09sek1jYnhLREZxXG51MmpjYkFubnJRS0JnRlUxaklGSkxm" - "TE5FazE2Tys0aWF6K0Jack5EdmN1TjA2aUhMYzYveDJLdDBpTHJwSXlsXG40cWg5WWF6bjNSQlA4NGRq" - "WjNGNzJ5bTRUTW1ITWJjcTZPRmo3N1JhcnI3UEtnNWxQMWp4Sk1DUVNpVFFudGttXG5FdS93VEpHVnZv" - "WURUUkRrZG13SVZTU05pTy9vTEc3dmpuOUY4QVltM1F6eEFjRDF3MDhnaGxzVEFvR0FidUthXG4vNTJq" - "eVdPUVhGbWZXMjVFc2VvRTh2ZzNYZTlnZG5jRUJ1anFkNlZPeEVYbkJHV1h1U0dFVEo0MGVtMVVubHVR" - "XG5PWHNFRzhlKzlKS2ZtZ3FVYWU5bElWR2dlclpVaUZveUNuRlVHK0d0MEIvNXRaUWRGSTF6ampacVZ4" - "Ry9idXFHXG5CanRjMi9XN1A4T2tDQ21sVHdncTVPRXFqZXVGeWJ2cnpmSTBhUjBDZ1lCdVlYWm5MMm1x" - "eVNma0FnaGswRVVmXG5XeElDb1FmRDdCQlJBV3lmL3VwRjQ2NlMvRmhONUVreG5vdkZ2RlZyQjU1SHVH" - "RTh2Qk4vTEZNVXlPU0xXQ0lIXG5RUG9ZcytNM0NLdGJWTXMxY1h2Tm5tZFRhMnRyYjQ0SlQ5ZlFLbkVw" - "a2VsbUdPdXJMNEVMdmFyUEFyR0x4VllTXG5jWFo1a1FBUy9GeGhFSDZSbnFSalFnPT1cbi0tLS0tRU5E" - "IFBSSVZBVEUgS0VZLS0tLS1cbiIsCiAgImNsaWVudF9lbWFpbCI6ICJzb21lLXNlcnZpY2UtYWNjb3Vu" - "dEBnaWZ0bGVzcy10ZXN0cy5pYW0uZ3NlcnZpY2VhY2NvdW50LmNvbSIsCiAgImNsaWVudF9pZCI6ICIx" - "MDk4NTYwMjgzNDI5MDI4ODI3MTUiLAogICJhdXRoX3VyaSI6ICJodHRwczovL2FjY291bnRzLmdvb2ds" - "ZS5jb20vby9vYXV0aDIvYXV0aCIsCiAgInRva2VuX3VyaSI6ICJodHRwczovL29hdXRoMi5nb29nbGVh" - "cGlzLmNvbS90b2tlbiIsCiAgImF1dGhfcHJvdmlkZXJfeDUwOV9jZXJ0X3VybCI6ICJodHRwczovL3d3" - "dy5nb29nbGVhcGlzLmNvbS9vYXV0aDIvdjEvY2VydHMiLAogICJjbGllbnRfeDUwOV9jZXJ0X3VybCI6" - "ICJodHRwczovL3d3dy5nb29nbGVhcGlzLmNvbS9yb2JvdC92MS9tZXRhZGF0YS94NTA5L3NvbWUtc2Vy" - 
"dmljZS1hY2NvdW50JTQwZ2lmdGxlc3MtdGVzdHMuaWFtLmdzZXJ2aWNlYWNjb3VudC5jb20iCn0K" + "ewogICJ0eXBlIjogInNlcnZpY2VfYWNjb3VudCIsCiAgInByb2plY3RfaWQiOiAiZ2lmdGxl" + "c3MtdGVzdHMiLAogICJwcml2YXRlX2tleV9pZCI6ICI4MWRhNDcxNzhiYzhmYjE1MDU1NTg3" + "OWRjZTczZThmZDlmOWI4NmJkIiwKICAicHJpdmF0ZV9rZXkiOiAiLS0tLS1CRUdJTiBQUklW" + "QVRFIEtFWS0tLS0tXG5NSUlFdkFJQkFEQU5CZ2txaGtpRzl3MEJBUUVGQUFTQ0JLWXdnZ1Np" + "QWdFQUFvSUJBUUNsYXdDOUEvZHBnbVJWXG5kYVg2UW5xY1N6YW5ueTdCVlgwVklwUHVjNzl2" + "aFR2NWRwZXRaa29SQmV6Uzg2ZStHUHVyTmJIMU9rWEZrL2tkXG5SNHFqMDV6SXlYeWxiQUVx" + "Sk1BV24zZFY0VUVRVFlmRitPY0ltZUxpcjR3cW9pTldDZDNJaHErNHVVeU1WRDMxXG5wc1Fl" + "cWVxcWV6bVoyNG1oTjBLK2NQczNuSXlIK0lzZXFsWjJob3U3bUU3U2JsYXdjc04ramcyNmQ5" + "YzFUZlpoXG42eFozVkpndGFtcUZvdlZmbEZwNFVvLy9tVGo0cXEwUWRUYk9SS1NEeVkxTWhk" + "Q24veSsyaForVm9IUitFM0Z4XG5XRmc2VGFwRGJhc29heEp5YjRoZEFFK0JhbW14bklTL09G" + "bElaMGVoL2tsRmlBTlJRMEpQb2dXRjFjVE9NcVFxXG4wMlVFV2V5ckFnTUJBQUVDZ2dFQUJN" + "OE5odGVQNElhTEUxd2haclN0cEp5NWltMGgxenFXTVlCTU85WDR4KzJUXG5PZmRUYStLbWtp" + "cUV1c1UyanNJdks1VUJPakVBcncxVU1RYnBaaEtoTDhub2c3dGkyNjVoMG1Ba1pzWlZOWHU0" + "XG5UKzQ4REZ4YzQ4THlzaktXM1RCQVBSb2RRbkJLTVA3MnY4QThKMU5BYlMwZ3IvTW1TbEVi" + "dm1tT2FuTU9ONXAwXG43djlscm9GMzFOakMzT05OY25pUlRYY01xT2tEbWt5LyszeVc2RldM" + "MkJZV3RwcGN3L0s1TnYxdGNMTG5iajVhXG5Hc3dVMENtQXgyTEVoWEo0bndJaWlFR3h6UGZY" + "VXNLcVhLL2JEZENKbDUzMTgraU9aSHNrdXR1OFlqQVpsdktpXG5yckNFUkFXZitLeTZ0WGhn" + "KzJIRzJJbUc5cG8wRnUwTGlIU0ZVUURKY1FLQmdRRFQ5RDJEYm9SNWFGWW0wQlVSXG5vNGd4" + "OHZGc0NyTEx0a09EZGx3U2wrT20yblFvY0JXSTEyTmF5QXRlL2xhVFZNRlorVks1bU9vYXl2" + "WnljTU1YXG5SdXZJYmdCTFdHYkdwSXdXZnlDOGxRZEJYM09xZTZZSzZTMUU2VnNYUVN0aHQ0" + "YUx3ZGpGQ2J6VU1lc1ZzREV5XG5FYU85aXlTUVlFTmFTN2V3amFzNUFVU1F0d0tCZ1FESHl4" + "WUp3bWxpQzE4NEVyZ3lZSEFwYm9weXQzSVkzVGFKXG5yV2MrSGw5WDNzVEJzaFVQYy85Smhj" + "anZKYWVzMlhrcEEwYmY5cis1MEcxUndua3dMWHZsbDJSU0FBNE05TG4rXG45cVlsNEFXNU9Q" + "VTVJS0tKYVk1c0kzSHdXTXd6elRya3FBV3hNallJME9OSnBaWUVnSTVKN09sek1jYnhLREZx" + "XG51MmpjYkFubnJRS0JnRlUxaklGSkxmTE5FazE2Tys0aWF6K0Jack5EdmN1TjA2aUhMYzYv" + "eDJLdDBpTHJwSXlsXG40cWg5WWF6bjNSQlA4NGRqWjNGNzJ5bTRUTW1ITWJjcTZPRmo3N1Jh" + "cnI3UEtnNWxQMWp4Sk1DUVNpVFFudGttXG5FdS93VEpHVnZvWURUUkRrZG13SVZTU05pTy9v" + "TEc3dmpuOUY4QVltM1F6eEFjRDF3MDhnaGxzVEFvR0FidUthXG4vNTJqeVdPUVhGbWZXMjVF" + "c2VvRTh2ZzNYZTlnZG5jRUJ1anFkNlZPeEVYbkJHV1h1U0dFVEo0MGVtMVVubHVRXG5PWHNF" + "RzhlKzlKS2ZtZ3FVYWU5bElWR2dlclpVaUZveUNuRlVHK0d0MEIvNXRaUWRGSTF6ampacVZ4" + "Ry9idXFHXG5CanRjMi9XN1A4T2tDQ21sVHdncTVPRXFqZXVGeWJ2cnpmSTBhUjBDZ1lCdVlY" + "Wm5MMm1xeVNma0FnaGswRVVmXG5XeElDb1FmRDdCQlJBV3lmL3VwRjQ2NlMvRmhONUVreG5v" + "dkZ2RlZyQjU1SHVHRTh2Qk4vTEZNVXlPU0xXQ0lIXG5RUG9ZcytNM0NLdGJWTXMxY1h2Tm5t" + "ZFRhMnRyYjQ0SlQ5ZlFLbkVwa2VsbUdPdXJMNEVMdmFyUEFyR0x4VllTXG5jWFo1a1FBUy9G" + "eGhFSDZSbnFSalFnPT1cbi0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS1cbiIsCiAgImNsaWVu" + "dF9lbWFpbCI6ICJzb21lLXNlcnZpY2UtYWNjb3VudEBnaWZ0bGVzcy10ZXN0cy5pYW0uZ3Nl" + "cnZpY2VhY2NvdW50LmNvbSIsCiAgImNsaWVudF9pZCI6ICIxMDk4NTYwMjgzNDI5MDI4ODI3" + "MTUiLAogICJhdXRoX3VyaSI6ICJodHRwczovL2FjY291bnRzLmdvb2dsZS5jb20vby9vYXV0" + "aDIvYXV0aCIsCiAgInRva2VuX3VyaSI6ICJodHRwczovL29hdXRoMi5nb29nbGVhcGlzLmNv" + "bS90b2tlbiIsCiAgImF1dGhfcHJvdmlkZXJfeDUwOV9jZXJ0X3VybCI6ICJodHRwczovL3d3" + "dy5nb29nbGVhcGlzLmNvbS9vYXV0aDIvdjEvY2VydHMiLAogICJjbGllbnRfeDUwOV9jZXJ0" + "X3VybCI6ICJodHRwczovL3d3dy5nb29nbGVhcGlzLmNvbS9yb2JvdC92MS9tZXRhZGF0YS94" + "NTA5L3NvbWUtc2VydmljZS1hY2NvdW50JTQwZ2lmdGxlc3MtdGVzdHMuaWFtLmdzZXJ2aWNl" + "YWNjb3VudC5jb20iCn0K" ) @pytest.fixture -def storage_backend() -> Generator[GoogleCloudStorage, None, None]: - """Provide a Google Cloud Storage backend for all 
GCS tests. - - For this to work against production Google Cloud, you need to set - ``GCP_ACCOUNT_KEY_FILE``, ``GCP_PROJECT_NAME`` and ``GCP_BUCKET_NAME`` - environment variables when running the tests. - - If these variables are not set, and pytest-vcr is not in use, the - tests *will* fail. - """ - account_key_file = os.environ.get("GCP_ACCOUNT_KEY_FILE") - project_name = os.environ.get("GCP_PROJECT_NAME") - bucket_name = os.environ.get("GCP_BUCKET_NAME") - prefix = "giftless-tests" - - if account_key_file and project_name and bucket_name: - # We use a live GCS bucket to test - storage = GoogleCloudStorage( - project_name=project_name, - bucket_name=bucket_name, - account_key_file=account_key_file, - path_prefix=prefix, - ) - try: - yield storage - finally: - bucket = storage.storage_client.bucket(bucket_name) - try: - blobs = bucket.list_blobs(prefix=prefix + "/") - bucket.delete_blobs(blobs) - except GoogleAPIError as e: - raise pytest.PytestWarning( - f"Could not clean up after test: {e}" - ) from None - else: - yield GoogleCloudStorage( - project_name=MOCK_GCP_PROJECT_NAME, - bucket_name=MOCK_GCP_BUCKET_NAME, - account_key_base64=MOCK_GCP_KEY_B64, - path_prefix=prefix, - ) - - -@pytest.fixture(scope="module") -def vcr_config() -> dict[str, Any]: - live_tests = bool( - os.environ.get("GCP_ACCOUNT_KEY_FILE") - and os.environ.get("GCP_PROJECT_NAME") - and os.environ.get("GCP_BUCKET_NAME") +def storage_backend( + storage_path: Path, +) -> MockGoogleCloudStorage: + """Provide a mock Google Cloud Storage backend for all GCS tests.""" + return MockGoogleCloudStorage( + project_name=MOCK_GCP_PROJECT_NAME, + bucket_name=MOCK_GCP_BUCKET_NAME, + account_key_base64=MOCK_GCP_KEY_B64, + path=storage_path, ) - mode = "once" if live_tests else "none" - return { - "filter_headers": [("authorization", "fake-authz-header")], - "record_mode": mode, - } -# TODO @athornton: updating the storage backends has caused the VCR cassettes -# to become invalid. Datopian will need to rebuild those cassettes with data -# from the current implementation, or (better) we should use something other -# than pytest-vcr, which is opaque and unhelpful. -# -# I can confirm that the Google Cloud Storage Backend at least works in -# conjunction with Workload Identity, since I'm using that for my own storage -# in my Git LFS implementation. -- AJT 20231220 -# -# @pytest.mark.vcr() -# class TestGoogleCloudStorageBackend( -# StreamingStorageAbstractTests, ExternalStorageAbstractTests -# ): -# pass +class TestGoogleCloudStorageBackend( + StreamingStorageAbstractTests, ExternalStorageAbstractTests +): + pass
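
Note on the storage_path argument used by the new storage_backend fixture in
tests/storage/test_google_cloud.py: it is not defined anywhere in this patch
series, so it presumably already exists as a fixture in the test suite's
conftest.py. A minimal sketch of what such a fixture could look like,
assuming only pytest's built-in tmp_path fixture (hypothetical, illustrative
only, not part of these patches):

    # conftest.py (sketch; names here are assumptions, not from the patches)
    from pathlib import Path

    import pytest


    @pytest.fixture
    def storage_path(tmp_path: Path) -> Path:
        # Per-test scratch directory that MockGoogleCloudStorage writes
        # blobs into; pytest removes tmp_path automatically after the test.
        path = tmp_path / "gcs-storage"
        path.mkdir()
        return path

With a fixture along these lines, MockGoogleCloudStorage stores each object
at <storage_path>/<prefix>/<oid>, which is exactly the layout that the
exists/get/put round-trips in the abstract test suites exercise.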