diff --git a/deepset_cloud_sdk/_api/files.py b/deepset_cloud_sdk/_api/files.py
index 13790a40..9999c5f2 100644
--- a/deepset_cloud_sdk/_api/files.py
+++ b/deepset_cloud_sdk/_api/files.py
@@ -92,7 +92,6 @@ async def list_paginated(
         workspace_name: str,
         limit: int = 100,
         name: Optional[str] = None,
-        content: Optional[str] = None,
         odata_filter: Optional[str] = None,
         after_value: Optional[Any] = None,
         after_file_id: Optional[UUID] = None,
@@ -103,7 +102,6 @@ async def list_paginated(
         :param workspace_name: Name of the workspace to use.
         :param limit: Number of files to return per page.
         :param name: Name of the file to odata_filter by.
-        :param content: Content of the file to odata_filter by.
         :param odata_filter: Odata odata_filter to apply.
         :param after_value: Value to start after.
         :param after_file_id: File ID to start after.
@@ -119,10 +117,6 @@ async def list_paginated(
         if name:
             params["name"] = name
 
-        # content search file
-        if content:
-            params["content"] = content
-
         # odata odata_filter for file meta
         if odata_filter:
             params["filter"] = odata_filter
diff --git a/deepset_cloud_sdk/_service/files_service.py b/deepset_cloud_sdk/_service/files_service.py
index 6efd44a7..747caf49 100644
--- a/deepset_cloud_sdk/_service/files_service.py
+++ b/deepset_cloud_sdk/_service/files_service.py
@@ -481,7 +481,6 @@ async def download(
         workspace_name: str,
         file_dir: Optional[Union[Path, str]] = None,
         name: Optional[str] = None,
-        content: Optional[str] = None,
         odata_filter: Optional[str] = None,
         include_meta: bool = True,
         batch_size: int = 50,
@@ -493,7 +492,6 @@ async def download(
         :param workspace_name: Name of the workspace to upload the files to. It uses the workspace from the .ENV file by default.
         :param file_dir: Path to the folder to download. If None, the current working directory is used.
         :param name: odata_filter by file name.
-        :param content: odata_filter by file content.
         :param odata_filter: odata_filter by file meta data.
         :param include_meta: If True, downloads the metadata files as well.
         :param batch_size: Batch size for the listing.
@@ -509,7 +507,6 @@ async def download(
             await self._files.list_paginated(
                 workspace_name,
                 name=name,
-                content=content,
                 odata_filter=odata_filter,
                 limit=1,
             )
@@ -527,7 +524,6 @@ async def download(
             response = await self._files.list_paginated(
                 workspace_name=workspace_name,
                 name=name,
-                content=content,
                 odata_filter=odata_filter,
                 limit=batch_size,
                 after_file_id=after_file_id,
@@ -641,7 +637,6 @@ async def list_all(
         self,
         workspace_name: str,
         name: Optional[str] = None,
-        content: Optional[str] = None,
         odata_filter: Optional[str] = None,
         batch_size: int = 100,
         timeout_s: Optional[int] = None,
@@ -653,7 +648,6 @@ async def list_all(
 
         :param workspace_name: Name of the workspace whose files you want to list.
         :param name: odata_filter by file name.
-        :param content: odata_filter by file content.
         :param odata_filter: odata_filter by file meta data.
         :param batch_size: Number of files to return per request.
         :param timeout_s: Timeout in seconds for the listing.
@@ -670,7 +664,6 @@ async def list_all(
             response = await self._files.list_paginated(
                 workspace_name,
                 name=name,
-                content=content,
                 odata_filter=odata_filter,
                 limit=batch_size,
                 after_file_id=after_file_id,
diff --git a/deepset_cloud_sdk/cli.py b/deepset_cloud_sdk/cli.py
index c7824687..f3c1c4bd 100644
--- a/deepset_cloud_sdk/cli.py
+++ b/deepset_cloud_sdk/cli.py
@@ -77,7 +77,6 @@ def download(  # pylint: disable=too-many-arguments
     workspace_name: str = DEFAULT_WORKSPACE_NAME,
     file_dir: Optional[str] = None,
     name: Optional[str] = None,
-    content: Optional[str] = None,
     odata_filter: Optional[str] = None,
     include_meta: bool = True,
     batch_size: int = 50,
@@ -90,7 +89,6 @@ def download(  # pylint: disable=too-many-arguments
     :param workspace_name: Name of the workspace to download the files from. Uses the workspace from the .ENV file by default.
     :param file_dir: Path to the folder where you want to download the files.
     :param name: Name of the file to odata_filter for.
-    :param content: Content of the file to odata_filter for.
     :param odata_filter: odata_filter to apply to the file list.
     :param include_meta: Downloads metadata of the files.
     :param batch_size: Batch size for file listing.
@@ -102,7 +100,6 @@ def download(  # pylint: disable=too-many-arguments
         workspace_name=workspace_name,
         file_dir=file_dir,
         name=name,
-        content=content,
         odata_filter=odata_filter,
         include_meta=include_meta,
         batch_size=batch_size,
@@ -159,7 +156,6 @@ def logout() -> None:
 def list_files(
     api_key: Optional[str] = None,
     api_url: Optional[str] = None,
-    content: Optional[str] = None,
     name: Optional[str] = None,
     odata_filter: Optional[str] = None,
     workspace_name: str = DEFAULT_WORKSPACE_NAME,
@@ -172,7 +168,6 @@ def list_files(
     :param api_url: API URL to use for authentication.
     :param workspace_name: Name of the workspace to list the files from. Uses the workspace from the .ENV file by default.
     :param name: Name of the file to odata_filter for.
-    :param content: Content of the file to odata_filter for.
     :param odata_filter: odata_filter to apply to the file list.
     :param batch_size: Batch size to use for the file list.
     :param timeout_s: The timeout for this request, in seconds.
@@ -192,9 +187,7 @@ def list_files(
         "created_at",
         "meta",
     ]  # Assuming the first row contains the headers
-    for files in sync_list_files(
-        api_key, api_url, workspace_name, name, content, odata_filter, batch_size, timeout_s
-    ):
+    for files in sync_list_files(api_key, api_url, workspace_name, name, odata_filter, batch_size, timeout_s):
         table = tabulate(files, headers, tablefmt="grid")  # type: ignore
         typer.echo(table)
         if len(files) > 0:
diff --git a/deepset_cloud_sdk/workflows/async_client/files.py b/deepset_cloud_sdk/workflows/async_client/files.py
index e062c786..982ad79e 100644
--- a/deepset_cloud_sdk/workflows/async_client/files.py
+++ b/deepset_cloud_sdk/workflows/async_client/files.py
@@ -32,7 +32,6 @@ async def list_files(
     api_url: Optional[str] = None,
     workspace_name: str = DEFAULT_WORKSPACE_NAME,
     name: Optional[str] = None,
-    content: Optional[str] = None,
     odata_filter: Optional[str] = None,
     batch_size: int = 100,
     timeout_s: Optional[int] = None,
@@ -43,7 +42,6 @@ async def list_files(
     :param api_url: API URL to use for authentication.
     :param workspace_name: Name of the workspace to list the files from. It uses the workspace from the .ENV file by default.
     :param name: Name of the file to odata_filter for.
-    :param content: Content of the file to odata_filter for.
     :param odata_filter: The odata_filter to apply to the file list.
         For example, `odata_filter="category eq 'news'"` lists files with metadata `{"meta": {"category": "news"}}`.
     :param timeout_s: The timeout in seconds for this API call.
@@ -55,7 +53,6 @@ async def list_files(
         async for file_batch in file_service.list_all(
             workspace_name=workspace_name,
             name=name,
-            content=content,
             odata_filter=odata_filter,
             batch_size=batch_size,
             timeout_s=timeout_s,
@@ -167,7 +164,6 @@ async def download(
     workspace_name: str = DEFAULT_WORKSPACE_NAME,
     file_dir: Optional[Union[Path, str]] = None,
     name: Optional[str] = None,
-    content: Optional[str] = None,
     odata_filter: Optional[str] = None,
     include_meta: bool = True,
     batch_size: int = 50,
@@ -183,7 +179,6 @@ async def download(
     :param workspace_name: Name of the workspace to upload the files to. It uses the workspace from the .ENV file by default.
     :param file_dir: Path to the folder to download.
     :param name: Name of the file to odata_filter by.
-    :param content: Content of a file to odata_filter by.
     :param odata_filter: odata_filter by file meta data.
     :param include_meta: Whether to include the file meta in the folder.
     :param batch_size: Batch size for the listing.
@@ -197,7 +192,6 @@ async def download(
         workspace_name=workspace_name,
         file_dir=file_dir,
         name=name,
-        content=content,
         odata_filter=odata_filter,
         include_meta=include_meta,
         batch_size=batch_size,
diff --git a/deepset_cloud_sdk/workflows/sync_client/files.py b/deepset_cloud_sdk/workflows/sync_client/files.py
index a52456bb..7d4b4a2c 100644
--- a/deepset_cloud_sdk/workflows/sync_client/files.py
+++ b/deepset_cloud_sdk/workflows/sync_client/files.py
@@ -89,7 +89,6 @@ def download(  # pylint: disable=too-many-arguments
     workspace_name: str = DEFAULT_WORKSPACE_NAME,
     file_dir: Optional[Union[Path, str]] = None,
     name: Optional[str] = None,
-    content: Optional[str] = None,
     odata_filter: Optional[str] = None,
     include_meta: bool = True,
     batch_size: int = 50,
@@ -105,7 +104,6 @@ def download(  # pylint: disable=too-many-arguments
     :param workspace_name: Name of the workspace to upload the files to. It uses the workspace from the .ENV file by default.
     :param file_dir: Path to the folder to download.
     :param name: Name of the file to odata_filter by.
-    :param content: Content of a file to odata_filter by.
     :param odata_filter: odata_filter by file meta data.
     :param include_meta: Whether to include the file meta in the folder.
     :param batch_size: Batch size for the listing.
@@ -120,7 +118,6 @@ def download(  # pylint: disable=too-many-arguments
         api_url=api_url,
         workspace_name=workspace_name,
         name=name,
-        content=content,
         odata_filter=odata_filter,
         file_dir=file_dir,
         include_meta=include_meta,
@@ -251,7 +248,6 @@ def list_files(
     api_url: Optional[str] = None,
     workspace_name: str = DEFAULT_WORKSPACE_NAME,
     name: Optional[str] = None,
-    content: Optional[str] = None,
     odata_filter: Optional[str] = None,
     batch_size: int = 100,
     timeout_s: Optional[int] = None,
@@ -262,7 +258,6 @@ def list_files(
     :param api_url: API URL to use for authentication.
     :param workspace_name: Name of the workspace to list the files from. It uses the workspace from the .ENV file by default.
     :param name: Name of the file to odata_filter for.
-    :param content: Content of the file to odata_filter for.
     :param odata_filter: odata_filter to apply to the file list.
         For example, `odata_filter="category eq 'news'" lists files with metadata `{"meta": {"category": "news"}}.
     :param batch_size: Batch size to use for the file list.
@@ -275,7 +270,6 @@ def list_files(
         api_url=api_url,
         workspace_name=workspace_name,
         name=name,
-        content=content,
         odata_filter=odata_filter,
         batch_size=batch_size,
         timeout_s=timeout_s,
diff --git a/pyproject.toml b/pyproject.toml
index 1bd81251..9c307197 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -22,7 +22,7 @@ classifiers = [
   "Programming Language :: Python :: Implementation :: PyPy",
 ]
 dependencies = [
-  "structlog>=24.4.0, < 25.0.0",
+  "structlog>=24.0.0",
   "httpx==0.27.2",
   "python-dotenv==1.0.1",
   "typer==0.12.5",
@@ -63,6 +63,7 @@ dependencies = [
   "tenacity==8.3.0",
   "aiohttp==3.9.5",
   "pyrate-limiter==3.6.0",
+
 ]
 
 [tool.hatch.envs.test.scripts]
@@ -71,12 +72,7 @@ integration = "pytest --cov-report=term-missing --cov-config=pyproject.toml --co
 
 [tool.hatch.envs.test]
 template = 'default'
-dependencies = [
-  "pytest-cov==4.0.0",
-  "pytest==7.3.1",
-  "pytest-asyncio==0.21.0",
-  "Faker==19.13.0",
-]
+dependencies = ["pytest-cov==4.0.0", "pytest==7.3.1", "pytest-asyncio==0.21.0"]
 
 
 [tool.hatch.envs.code-quality]
@@ -94,6 +90,7 @@ dependencies = [
   "types-aiofiles==23.1.0.2",
   "types-tabulate==0.9.0.2",
   "autoflake==2.1.1",
+
 ]
 
 [tool.hatch.envs.code-quality.scripts]
diff --git a/tests/conftest.py b/tests/conftest.py
index 6c317afc..f41c840b 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -10,7 +10,8 @@
 import pytest
 import structlog
 from dotenv import load_dotenv
-from faker import Faker
+
+# from faker import Faker
 from tenacity import retry, stop_after_delay, wait_fixed
 
 from deepset_cloud_sdk._api.config import CommonConfig
@@ -111,8 +112,7 @@ def _wait_for_file_to_be_available(
 
 @pytest.fixture(scope="session")
 def workspace_name(integration_config: CommonConfig) -> Generator[str, None, None]:
     """Create a workspace for the tests and delete it afterwards."""
-    fake = Faker()
-    workspace_name = f"sdktest_{'_'.join(fake.words(3))}"
+    workspace_name = f"sdktest_{uuid4()}"
 
     logger.info("Creating workspace", workspace_name=workspace_name)
diff --git a/tests/integration/api/test_integration_files.py b/tests/integration/api/test_integration_files.py
index 3048b4b9..74a0123c 100644
--- a/tests/integration/api/test_integration_files.py
+++ b/tests/integration/api/test_integration_files.py
@@ -31,7 +31,6 @@ async def test_list_paginated(
         workspace_name=workspace_name,
         limit=10,
         name="example0.txt",
-        content="text",
         odata_filter="find eq 'me'",
     )
     assert result.total == 1
diff --git a/tests/unit/api/test_files.py b/tests/unit/api/test_files.py
index 49e38eed..43f5d595 100644
--- a/tests/unit/api/test_files.py
+++ b/tests/unit/api/test_files.py
@@ -56,7 +56,6 @@ async def test_list_paginated(self, files_api: FilesAPI, mocked_deepset_cloud_ap
             workspace_name="test_workspace",
             limit=10,
             name="things_1",
-            content="silly",
             odata_filter="created_at eq '2022-06-21T16:40:00.634653+00:00' ",
         )
         assert result == FileList(
@@ -79,7 +78,6 @@ async def test_list_paginated(self, files_api: FilesAPI, mocked_deepset_cloud_ap
             params={
                 "limit": 10,
                 "name": "things_1",
-                "content": "silly",
                 "filter": "created_at eq '2022-06-21T16:40:00.634653+00:00' ",
             },
         )
diff --git a/tests/unit/service/test_files_service.py b/tests/unit/service/test_files_service.py
index e884b8dd..fd72a341 100644
--- a/tests/unit/service/test_files_service.py
+++ b/tests/unit/service/test_files_service.py
@@ -666,14 +666,12 @@ async def test_download_files_with_filter(self, file_service: FilesService, monk
             show_progress=False,
             odata_filter="category eq 'news'",
             name="asdf",
-            content="bsdf",
             batch_size=54,
         )
 
         mocked_list_paginated.assert_called_once_with(
             workspace_name="test_workspace",
             name="asdf",
-            content="bsdf",
             odata_filter="category eq 'news'",
             limit=54,
             after_file_id=None,
@@ -710,7 +708,6 @@ async def test_download_files_with_filter_and_progress_bar(
             show_progress=True,  # This requires a previous cal that checks the total number of files
             odata_filter="category eq 'news'",
             name="asdf",
-            content="bsdf",
             batch_size=54,
         )
 
@@ -718,14 +715,12 @@ async def test_download_files_with_filter_and_progress_bar(
             call(
                 workspace_name="test_workspace",
                 name="asdf",
-                content="bsdf",
                 odata_filter="category eq 'news'",
                 limit=54,
             ),
             call(
                 workspace_name="test_workspace",
                 name="asdf",
-                content="bsdf",
                 odata_filter="category eq 'news'",
                 limit=54,
                 after_file_id=None,
diff --git a/tests/unit/test_cli.py b/tests/unit/test_cli.py
index 3cde773c..744d6317 100644
--- a/tests/unit/test_cli.py
+++ b/tests/unit/test_cli.py
@@ -109,7 +109,6 @@ def test_download_files(self, sync_download_mock: AsyncMock) -> None:
             workspace_name="default",
             file_dir=None,
             name=None,
-            content=None,
             odata_filter=None,
             include_meta=True,
             batch_size=50,
diff --git a/tests/unit/workflows/async_client/test_async_workflow_files.py b/tests/unit/workflows/async_client/test_async_workflow_files.py
index 9a3641be..632b7835 100644
--- a/tests/unit/workflows/async_client/test_async_workflow_files.py
+++ b/tests/unit/workflows/async_client/test_async_workflow_files.py
@@ -118,7 +118,6 @@ async def test_download_files(self, monkeypatch: MonkeyPatch) -> None:
         await download(
             workspace_name="my_workspace",
             name="test_file.txt",
-            content="test content",
             odata_filter="test",
             batch_size=100,
             timeout_s=100,
@@ -127,7 +126,6 @@ async def test_download_files(self, monkeypatch: MonkeyPatch) -> None:
             workspace_name="my_workspace",
             file_dir=None,
             name="test_file.txt",
-            content="test content",
             odata_filter="test",
             include_meta=True,
             batch_size=100,
@@ -159,7 +157,6 @@ async def mocked_list_all(
         async for file_batch in list_files(
             workspace_name="my_workspace",
             name="test_file.txt",
-            content="test content",
             odata_filter="test",
             batch_size=100,
             timeout_s=100,
@@ -188,7 +185,6 @@ async def mocked_list_all(
         async for file_batch in list_files(
             workspace_name="my_workspace",
             name="test_file.txt",
-            content="test content",
             odata_filter="test",
             batch_size=100,
             timeout_s=100,
diff --git a/tests/unit/workflows/sync_client/test_sync_workflow_files.py b/tests/unit/workflows/sync_client/test_sync_workflow_files.py
index 130c0002..52161983 100644
--- a/tests/unit/workflows/sync_client/test_sync_workflow_files.py
+++ b/tests/unit/workflows/sync_client/test_sync_workflow_files.py
@@ -106,7 +106,6 @@ async def mocked_async_list_files(*args: Any, **kwargs: Any) -> AsyncGenerator[L
         list_files(
             workspace_name="my_workspace",
             name="test_file.txt",
-            content="test content",
             odata_filter="test",
             batch_size=100,
             timeout_s=100,
@@ -131,7 +130,6 @@ def test_download_files() -> None:
     download(
         workspace_name="my_workspace",
         name="test_file.txt",
-        content="test content",
         odata_filter="test",
         batch_size=100,
         timeout_s=100,
@@ -141,7 +139,6 @@ def test_download_files() -> None:
         api_url=None,
         workspace_name="my_workspace",
         name="test_file.txt",
-        content="test content",
         odata_filter="test",
         file_dir=None,
         include_meta=True,
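
Usage after this change, as a minimal sketch with the synchronous client (credentials and defaults assumed to come from the .ENV file; the workspace, file name, and filter values below are illustrative): files are filtered by name or by an OData metadata filter only, since the `content` parameter has been removed.

from deepset_cloud_sdk.workflows.sync_client.files import download, list_files

# List files by name and metadata filter; `content` is no longer accepted.
# Workspace, name, and filter values are placeholders.
for file_batch in list_files(
    workspace_name="default",
    name="example0.txt",
    odata_filter="category eq 'news'",
    batch_size=100,
    timeout_s=300,
):
    for file in file_batch:
        print(file.name)

# Download the matching files, including their metadata JSON files.
download(
    workspace_name="default",
    name="example0.txt",
    odata_filter="category eq 'news'",
    include_meta=True,
    batch_size=50,
    timeout_s=300,
)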