✨Implement delete file endpoint (#4707)
bisgaard-itis authored Sep 6, 2023
1 parent 7404e61 commit 100492f
Showing 16 changed files with 2,532 additions and 165 deletions.
4 changes: 2 additions & 2 deletions services/api-server/requirements/_test.in
@@ -11,12 +11,13 @@
--constraint _base.txt



aioresponses
alembic
asgi_lifespan
click
docker
faker
jsonref
moto[server] # mock out tests based on AWS-S3
pytest
pytest-asyncio
@@ -27,4 +28,3 @@ pytest-runner
respx
sqlalchemy[mypy] # adds Mypy / Pep-484 Support for ORM Mappings SEE https://docs.sqlalchemy.org/en/20/orm/extensions/mypy.html
types-boto3
aioresponses
2 changes: 2 additions & 0 deletions services/api-server/requirements/_test.txt
@@ -170,6 +170,8 @@ jsonpickle==3.0.2
# via jschema-to-python
jsonpointer==2.4
# via jsonpatch
jsonref==1.1.0
# via -r requirements/_test.in
jsonschema==3.2.0
# via
# -c requirements/./constraints.txt

@@ -30,7 +30,13 @@
from ..._meta import API_VTAG
from ...models.pagination import Page, PaginationParams
from ...models.schemas.errors import ErrorGet
from ...models.schemas.files import ClientFile, ClientFileUploadSchema, File
from ...models.schemas.files import (
ClientFile,
ClientFileUploadData,
File,
FileUploadData,
UploadLinks,
)
from ...services.storage import StorageApi, StorageFileMetaData, to_file_api_model
from ..dependencies.authentication import get_current_user_id
from ..dependencies.services import get_api_client
@@ -187,7 +193,7 @@ async def upload_files(files: list[UploadFile] = FileParam(...)):

@router.post(
"/content",
response_model=ClientFileUploadSchema,
response_model=ClientFileUploadData,
include_in_schema=API_SERVER_DEV_FEATURES_ENABLED,
)
@cancel_on_disconnect
@@ -212,20 +218,18 @@ async def get_upload_links(
file_size=ByteSize(client_file.filesize),
is_directory=False,
)

query = f"{upload_links.links.complete_upload.query}".removesuffix(":complete")
url = request.url_for(
completion_url: URL = request.url_for(
"complete_multipart_upload", file_id=file_meta.id
).include_query_params(**dict(item.split("=") for item in query.split("&")))
upload_links.links.complete_upload = parse_obj_as(AnyUrl, f"{url}")

query = f"{upload_links.links.abort_upload.query}".removesuffix(":abort")
url = request.url_for(
"abort_multipart_upload", file_id=file_meta.id
).include_query_params(**dict(item.split("=") for item in query.split("&")))
upload_links.links.abort_upload = parse_obj_as(AnyUrl, f"{url}")

return ClientFileUploadSchema(file_id=file_meta.id, upload_schema=upload_links)
)
abort_url: URL = request.url_for("abort_multipart_upload", file_id=file_meta.id)
upload_data: FileUploadData = FileUploadData(
chunk_size=upload_links.chunk_size,
urls=upload_links.urls,
links=UploadLinks(
complete_upload=completion_url.path, abort_upload=abort_url.path
),
)
return ClientFileUploadData(file_id=file_meta.id, upload_schema=upload_data)


@router.get(
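Note how get_upload_links no longer rewrites the storage service's complete/abort URLs with query parameters: it now returns the api-server's own route paths inside an UploadLinks object. A rough client-side sketch of consuming the new ClientFileUploadData response follows; the base URL, request-body shape and exact link formats are assumptions for illustration, not taken from this diff.

```python
# Hypothetical client-side sketch (not part of this commit): consume the new
# ClientFileUploadData payload returned by POST /files/content.
import httpx

API_BASE = "http://localhost:8000/v0"  # assumed base URL and version prefix

with httpx.Client(base_url=API_BASE) as client:
    # Request-body shape is assumed: ClientFile exposes at least filename and
    # filesize, as used elsewhere in this diff.
    response = client.post(
        "/files/content", json={"filename": "data.bin", "filesize": 1024}
    )
    response.raise_for_status()
    upload_data = response.json()

    file_id = upload_data["file_id"]
    schema = upload_data["upload_schema"]
    chunk_size = schema["chunk_size"]  # size of each part to PUT
    part_urls = schema["urls"]         # presigned storage URLs, one per part
    links = schema["links"]            # relative api-server paths, not storage URLs

    # After PUT-ing the parts to part_urls, the upload is finished (or aborted)
    # through the api-server's own routes:
    #   links["complete_upload"] -> complete_multipart_upload endpoint
    #   links["abort_upload"]    -> abort_multipart_upload endpoint
    print(file_id, chunk_size, len(part_urls), links)
```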
@@ -264,19 +268,19 @@ async def get_file(

@router.delete(
"/{file_id}",
status_code=status.HTTP_204_NO_CONTENT,
responses={**_COMMON_ERROR_RESPONSES},
include_in_schema=API_SERVER_DEV_FEATURES_ENABLED,
)
async def delete_file(
file_id: UUID,
storage_client: Annotated[StorageApi, Depends(get_api_client(StorageApi))],
user_id: Annotated[int, Depends(get_current_user_id)],
storage_client: Annotated[StorageApi, Depends(get_api_client(StorageApi))],
):
assert storage_client  # nosec

msg = f"delete file {file_id=} of {user_id=}. SEE https://github.com/ITISFoundation/osparc-issues/issues/952"
raise NotImplementedError(msg)
file: File = await get_file(
file_id=file_id, storage_client=storage_client, user_id=user_id
)
await storage_client.delete_file(
user_id=user_id, quoted_storage_file_id=file.quoted_storage_file_id
)


@router.post(
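The new handler replaces the previous NotImplementedError stub: it resolves the file through get_file, delegates to StorageApi.delete_file, and answers with 204 No Content; like the other new routes it is only published while API_SERVER_DEV_FEATURES_ENABLED is set. A minimal caller-side sketch, assuming the router is mounted under /v0/files and that the api-server uses HTTP basic auth with an API key/secret pair:

```python
# Hypothetical caller-side sketch (not part of this commit): delete a file
# through the new endpoint. Base URL, auth scheme and the file id are placeholders.
import httpx

API_BASE = "http://localhost:8000/v0"
FILE_ID = "00000000-0000-0000-0000-000000000000"  # placeholder UUID

with httpx.Client(base_url=API_BASE, auth=("MY_API_KEY", "MY_API_SECRET")) as client:
    response = client.delete(f"/files/{FILE_ID}")
    response.raise_for_status()  # _COMMON_ERROR_RESPONSES cover e.g. a missing file
    assert response.status_code == 204  # HTTP_204_NO_CONTENT as declared on the route
```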
@@ -294,7 +298,7 @@ async def abort_multipart_upload(
assert user_id # nosec
file: File = File(id=file_id, filename=client_file.filename, checksum=None)
abort_link: URL = await storage_client.create_abort_upload_link(
file, query=dict(request.query_params)
file, query={"user_id": str(user_id)}
)
await abort_upload(abort_upload_link=parse_obj_as(AnyUrl, str(abort_link)))

@@ -318,7 +322,7 @@ async def complete_multipart_upload(

file: File = File(id=file_id, filename=client_file.filename, checksum=None)
complete_link: URL = await storage_client.create_complete_upload_link(
file, dict(request.query_params)
file, {"user_id": str(user_id)}
)

e_tag: ETag = await complete_file_upload(

@@ -7,9 +7,16 @@

import aiofiles
from fastapi import UploadFile
from models_library.api_schemas_storage import FileUploadSchema
from models_library.projects_nodes_io import StorageFileID
from pydantic import BaseModel, ByteSize, ConstrainedStr, Field, parse_obj_as, validator
from pydantic import (
AnyUrl,
BaseModel,
ByteSize,
ConstrainedStr,
Field,
parse_obj_as,
validator,
)

from ...utils.hash import create_md5_checksum

@@ -144,8 +151,17 @@ def quoted_storage_file_id(self) -> str:
return _quote(self.storage_file_id, safe="")


class ClientFileUploadSchema(BaseModel):
class UploadLinks(BaseModel):
abort_upload: str
complete_upload: str


class FileUploadData(BaseModel):
chunk_size: ByteSize
urls: list[AnyUrl]
links: UploadLinks


class ClientFileUploadData(BaseModel):
file_id: UUID = Field(..., description="The file resource id")
upload_schema: FileUploadSchema = Field(
..., description="Schema for uploading file"
)
upload_schema: FileUploadData = Field(..., description="Schema for uploading file")
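Compared to the replaced ClientFileUploadSchema, which embedded storage's FileUploadSchema directly, the new FileUploadData keeps the presigned part URLs but exposes the complete/abort links as plain string paths pointing back at the api-server. An illustrative construction of the new models, assuming the package layout implied by the relative imports in this file:

```python
# Illustrative only: building the new client-facing upload schema by hand.
from uuid import uuid4

from simcore_service_api_server.models.schemas.files import (  # assumed module path
    ClientFileUploadData,
    FileUploadData,
    UploadLinks,
)

upload_data = FileUploadData(
    chunk_size="5MiB",  # coerced to ByteSize by pydantic
    urls=["https://s3.example.com/bucket/part-1"],  # placeholder presigned part URL
    links=UploadLinks(
        # Relative api-server paths; the exact route format is assumed here.
        complete_upload="/v0/files/<file-id>:complete",
        abort_upload="/v0/files/<file-id>:abort",
    ),
)
client_upload = ClientFileUploadData(file_id=uuid4(), upload_schema=upload_data)
print(client_upload.json(indent=2))
```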

@@ -95,6 +95,13 @@ async def get_download_link(
link: AnyUrl = presigned_link.link
return link

async def delete_file(self, user_id: int, quoted_storage_file_id: str) -> None:
response = await self.client.delete(
f"/locations/{self.SIMCORE_S3_ID}/files/{quoted_storage_file_id}",
params={"user_id": user_id},
)
response.raise_for_status()

async def get_upload_links(
self, user_id: int, file_id: UUID, file_name: str
) -> FileUploadSchema:
@@ -105,6 +112,7 @@ async def get_upload_links(
f"/locations/{self.SIMCORE_S3_ID}/files/{object_path}",
params={"user_id": user_id, "file_size": 0},
)
response.raise_for_status()

enveloped_data = Envelope[FileUploadSchema].parse_obj(response.json())
assert enveloped_data.data # nosec
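StorageApi.delete_file is a thin wrapper around the storage service's DELETE route and now fails loudly via raise_for_status(). Roughly, it issues the request sketched below; the storage base URL, location id and file id are placeholders, while the quoting mirrors File.quoted_storage_file_id above.

```python
# Rough stand-alone equivalent of StorageApi.delete_file, spelled out with a
# bare httpx client. All concrete values below are placeholders.
from urllib.parse import quote

import httpx

STORAGE_BASE_URL = "http://storage:8080/v0"  # assumed storage service URL
SIMCORE_S3_ID = 0                            # assumed simcore S3 location id
storage_file_id = "api/1234/data.bin"        # example StorageFileID
quoted_storage_file_id = quote(storage_file_id, safe="")  # same quoting as the model


async def delete_file(user_id: int) -> None:
    async with httpx.AsyncClient(base_url=STORAGE_BASE_URL) as client:
        response = await client.delete(
            f"/locations/{SIMCORE_S3_ID}/files/{quoted_storage_file_id}",
            params={"user_id": user_id},
        )
        response.raise_for_status()  # surface 4xx/5xx from storage


# To try it out: import asyncio; asyncio.run(delete_file(user_id=1))
```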
@@ -1,15 +1,19 @@
import logging
import os
from contextlib import suppress
from dataclasses import dataclass
from pathlib import Path

import httpx
from fastapi import FastAPI
from httpx._types import URLTypes
from pydantic import ValidationError
from jsonschema import ValidationError

from .app_data import AppDataMixin
from .http_calls_capture import get_captured_as_json

if os.environ.get("API_SERVER_DEV_HTTP_CALLS_LOGS_PATH"):
from .http_calls_capture import get_captured_as_json
from .http_calls_capture_processing import CaptureProcessingException

_logger = logging.getLogger(__name__)

@@ -52,10 +56,14 @@ async def request(self, method: str, url: URLTypes, **kwargs):
_logger.info("Capturing %s ... [might be slow]", capture_name)
try:
capture_json = get_captured_as_json(name=capture_name, response=response)
_capture_logger.info("%s", capture_json)
except ValidationError:
_capture_logger.exception("Failed capturing %s", capture_name)

_capture_logger.info("%s,", capture_json)
except (CaptureProcessingException, ValidationError, httpx.RequestError):
_capture_logger.exception(
"Unexpected failure with %s",
capture_name,
exc_info=True,
stack_info=True,
)
return response


@@ -65,20 +73,19 @@ async def request(self, method: str, url: URLTypes, **kwargs):


def _setup_capture_logger_once(capture_path: Path) -> None:
"""NOTE: this is only to capture during developmetn"""
"""NOTE: this is only to capture during development"""

if not any(
isinstance(hnd, logging.FileHandler) for hnd in _capture_logger.handlers
):
file_handler = logging.FileHandler(filename=f"{capture_path}")
file_handler.setLevel(logging.INFO)

formatter = logging.Formatter("%(asctime)s - %(message)s")
formatter = logging.Formatter("%(message)s")
file_handler.setFormatter(formatter)

_capture_logger.addHandler(file_handler)
_logger.info("Setup capture logger at %s", capture_path)
_capture_logger.info("Started capture session ...")


def setup_client_instance(
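With these changes the capture machinery is only imported when API_SERVER_DEV_HTTP_CALLS_LOGS_PATH points at a log file, the formatter drops the timestamp, and every capture is written as a JSON object followed by a trailing comma (the "%s," format). A small, hypothetical loader for such a capture file, assuming nothing beyond that comma-separated layout:

```python
# Hypothetical helper (not part of this commit): read a dev capture file whose
# content is a comma-separated sequence of JSON objects, as produced by the
# "%s," log format with a bare "%(message)s" formatter.
import json
from pathlib import Path


def load_captures(capture_path: Path) -> list[dict]:
    raw = capture_path.read_text().strip()
    if not raw:
        return []
    # Wrap the comma-separated objects in brackets, dropping a dangling comma,
    # so the whole file parses as one JSON array.
    return json.loads(f"[{raw.rstrip(',')}]")


# Example: export API_SERVER_DEV_HTTP_CALLS_LOGS_PATH=/tmp/captures.json before
# starting the api-server, then:
#   captures = load_captures(Path("/tmp/captures.json"))
```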

@@ -4,6 +4,10 @@

import httpx
from pydantic import BaseModel, Field
from simcore_service_api_server.utils.http_calls_capture_processing import (
PathDescription,
enhance_from_openapi_spec,
)


class HttpApiCallCaptureModel(BaseModel):
@@ -15,30 +19,40 @@ class HttpApiCallCaptureModel(BaseModel):
description: str
method: Literal["GET", "PUT", "POST", "PATCH", "DELETE"]
host: str
path: str
path: PathDescription | str
query: str | None = None
request_payload: dict[str, Any] | None = None
response_body: dict[str, Any] | list | None = None
status_code: HTTPStatus = Field(default=HTTPStatus.OK)

@classmethod
def create_from_response(
cls, response: httpx.Response, name: str, description: str = ""
cls,
response: httpx.Response,
name: str,
description: str = "",
enhance_from_openapi_specs: bool = True,
) -> "HttpApiCallCaptureModel":
request = response.request

path: PathDescription | str
if enhance_from_openapi_specs:
path = enhance_from_openapi_spec(response)
else:
path = response.request.url.path

return cls(
name=name,
description=description or f"{request}",
method=request.method,
host=request.url.host,
path=request.url.path,
path=path,
query=request.url.query.decode() or None,
request_payload=json.loads(request.content.decode())
if request.content
else None,
response_body=response.json() if response.content else None,
status_code=response.status_code,
status_code=HTTPStatus(response.status_code),
)

def __str__(self) -> str:
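HttpApiCallCaptureModel can now describe the request path either as a raw string or as a PathDescription enriched from the upstream OpenAPI spec. A minimal usage sketch that skips the spec lookup; the module path is assumed from the absolute import of http_calls_capture_processing above, and the target URL is arbitrary:

```python
# Illustrative only: capture an httpx call without consulting an OpenAPI spec
# (enhance_from_openapi_specs=False keeps the plain request path).
import httpx
from simcore_service_api_server.utils.http_calls_capture import (  # assumed module path
    HttpApiCallCaptureModel,
)

response = httpx.get("https://httpbin.org/json")  # any httpx.Response works
capture = HttpApiCallCaptureModel.create_from_response(
    response,
    name="get_httpbin_json",
    description="example capture",
    enhance_from_openapi_specs=False,
)
print(capture.json(indent=1))
```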