From 10e077190f9f12545bfd1a6963f3efdd38efbb2f Mon Sep 17 00:00:00 2001 From: dantb Date: Sat, 11 Nov 2023 18:23:21 +0100 Subject: [PATCH] Integration test for remote/disk storage copy --- .../delta/plugins/storage/files/Files.scala | 31 ++- .../storage/files/FormDataExtractor.scala | 4 + .../storage/files/model/FileAttributes.scala | 2 +- .../files/model/FileCopyDestination.scala | 7 +- .../plugins/storage/files/model/FileId.scala | 1 + .../storage/files/model/FileRejection.scala | 3 + .../storage/files/routes/FilesRoutes.scala | 249 ++++++++++++------ .../bluebrain/nexus/tests/HttpClient.scala | 40 ++- .../epfl/bluebrain/nexus/tests/Optics.scala | 3 +- .../nexus/tests/kg/CopyFileSpec.scala | 49 ++++ .../nexus/tests/kg/DiskStorageSpec.scala | 4 +- .../nexus/tests/kg/RemoteStorageSpec.scala | 4 +- .../nexus/tests/kg/S3StorageSpec.scala | 2 +- .../nexus/tests/kg/StorageSpec.scala | 6 +- 14 files changed, 289 insertions(+), 116 deletions(-) create mode 100644 tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CopyFileSpec.scala diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/Files.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/Files.scala index f2e5971db2..dd71efbb86 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/Files.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/Files.scala @@ -2,10 +2,8 @@ package ch.epfl.bluebrain.nexus.delta.plugins.storage.files import akka.actor.typed.ActorSystem import akka.actor.{ActorSystem => ClassicActorSystem} -import akka.http.scaladsl.model.BodyPartEntity -//import akka.http.javadsl.model.BodyPartEntity import akka.http.scaladsl.model.ContentTypes.`application/octet-stream` -import akka.http.scaladsl.model.{ContentType, HttpEntity, Uri} +import akka.http.scaladsl.model._ import cats.effect.{Clock, ContextShift, IO, Timer} 
import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.kernel.cache.LocalCache @@ -201,16 +199,21 @@ final class Files( // TODO comments def copy( - id: FileId, + sourceId: FileId, destination: FileCopyDestination )(implicit c: Caller): IO[FileResource] = { for { - (file, description, sourceEntity) <- fetchSourceFile(id) + _ <- logger.info(s"Fetching source file") + (file, description, sourceEntity) <- fetchSourceFile(sourceId) + _ <- logger.info(s"Fetched source file, fetching destination storage") (pc, storageRef, storage) <- fetchDestinationStorage(destination) + _ <- logger.info(s"Fetched destination storage, validating storage type") _ <- validateStorageTypeForCopy(file.storageType, storage) - iri <- generateId(pc) + iri <- destination.id.fold(generateId(pc))(_.expandIri(fetchContext.onCreate).map(_._1)) + _ <- logger.info(s"Validated storage type, saving file") // TODO description has no metadata related to the Id only the file contents, can we reuse the old one? attributes <- saveFile(iri, storage, description, sourceEntity) + _ <- logger.info(s"Saved file, evaluating creation command destination storage") res <- eval(CreateFile(iri, destination.project, storageRef, storage.tpe, attributes, c.subject, destination.tag)) } yield res }.span("copyFile") @@ -223,8 +226,12 @@ final class Files( _ <- validateAuth(id.project, sourceStorage.value.storageValue.readPermission) attributes = file.value.attributes sourceBytes <- fetchFile(sourceStorage.value, attributes, file.id) - entity = HttpEntity(attributes.mediaType.getOrElse(`application/octet-stream`), sourceBytes) - (description, sourceEntity) <- extractFormData(iri, sourceStorage.value, entity) + bodyPartEntity = + HttpEntity(attributes.mediaType.getOrElse(ContentTypes.NoContentType), attributes.bytes, sourceBytes) + // TODO should this be strict? 
+ multipartEntity = + Multipart.FormData(Multipart.FormData.BodyPart("file", bodyPartEntity, Map("filename" -> attributes.filename))) + (description, sourceEntity) <- extractFormData(iri, sourceStorage.value, multipartEntity.toEntity()) } yield (file.value, description, sourceEntity) private def fetchDestinationStorage(destination: FileCopyDestination)(implicit c: Caller) = @@ -236,11 +243,7 @@ final class Files( private def validateStorageTypeForCopy(source: StorageType, destination: Storage): IO[Unit] = IO.raiseWhen(source == StorageType.S3Storage)( WrappedStorageRejection( - InvalidStorageType( - destination.id, - found = source, - expected = Set(StorageType.DiskStorage, StorageType.RemoteDiskStorage) - ) + InvalidStorageType(destination.id, source, Set(StorageType.DiskStorage, StorageType.RemoteDiskStorage)) ) ) >> IO.raiseUnless(source == destination.tpe)( @@ -269,6 +272,7 @@ final class Files( tag: Option[UserTag] )(implicit caller: Caller): IO[FileResource] = { for { + _ <- logger.info(s"DTBDTB updating file...") (iri, pc) <- id.expandIri(fetchContext.onModify) _ <- test(UpdateFile(iri, id.project, testStorageRef, testStorageType, testAttributes, rev, caller.subject, tag)) (storageRef, storage) <- fetchActiveStorage(storageId, id.project, pc) @@ -305,6 +309,7 @@ final class Files( tag: Option[UserTag] )(implicit caller: Caller): IO[FileResource] = { for { + _ <- logger.info(s"DTBDTB updating file link...") (iri, pc) <- id.expandIri(fetchContext.onModify) _ <- test(UpdateFile(iri, id.project, testStorageRef, testStorageType, testAttributes, rev, caller.subject, tag)) (storageRef, storage) <- fetchActiveStorage(storageId, id.project, pc) diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractor.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractor.scala index ea0c4e0c5c..9abccc76cb 100644 --- 
a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractor.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractor.scala @@ -10,6 +10,7 @@ import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, MultipartUnmars import akka.stream.scaladsl.{Keep, Sink} import cats.effect.{ContextShift, IO} import cats.syntax.all._ +import ch.epfl.bluebrain.nexus.delta.kernel.Logger import ch.epfl.bluebrain.nexus.delta.kernel.http.MediaTypeDetectorConfig import ch.epfl.bluebrain.nexus.delta.kernel.utils.{FileUtils, UUIDF} import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileDescription @@ -44,6 +45,8 @@ sealed trait FormDataExtractor { } object FormDataExtractor { + private val log = Logger[FormDataExtractor] + private val fieldName: String = "file" private val defaultContentType: ContentType.Binary = ContentTypes.`application/octet-stream` @@ -91,6 +94,7 @@ object FormDataExtractor { case Unmarshaller.NoContentException => WrappedAkkaRejection(RequestEntityExpectedRejection) case x: UnsupportedContentTypeException => + log.info(s"Supported media type is ${x.supported}").unsafeRunSync() WrappedAkkaRejection(UnsupportedRequestContentTypeRejection(x.supported, x.actualContentType)) case x: IllegalArgumentException => WrappedAkkaRejection(ValidationRejection(Option(x.getMessage).getOrElse(""), Some(x))) diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileAttributes.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileAttributes.scala index 426d9334e2..65c61683a4 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileAttributes.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileAttributes.scala @@ -28,7 +28,7 @@ import 
scala.annotation.nowarn * @param mediaType * the optional media type of the file * @param bytes - * the size of the file file in bytes + * the size of the file in bytes * @param digest * the digest information of the file * @param origin diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileCopyDestination.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileCopyDestination.scala index a7344ed05b..a7dae4833f 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileCopyDestination.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileCopyDestination.scala @@ -4,4 +4,9 @@ import ch.epfl.bluebrain.nexus.delta.sdk.model.IdSegment import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag -final case class FileCopyDestination(project: ProjectRef, storageId: Option[IdSegment], tag: Option[UserTag]) +final case class FileCopyDestination( + project: ProjectRef, + id: Option[FileId], + storageId: Option[IdSegment], + tag: Option[UserTag] +) diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileId.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileId.scala index 0af0c34bd0..a55f658f1f 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileId.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileId.scala @@ -19,6 +19,7 @@ object FileId { def apply(ref: ResourceRef, project: ProjectRef): FileId = FileId(IdSegmentRef(ref), project) def apply(id: IdSegment, tag: UserTag, project: ProjectRef): FileId = FileId(IdSegmentRef(id, tag), project) def apply(id: IdSegment, rev: 
Int, project: ProjectRef): FileId = FileId(IdSegmentRef(id, rev), project) + def apply(id: IdSegment, project: ProjectRef): FileId = FileId(IdSegmentRef(id), project) val iriExpander: ExpandIri[InvalidFileId] = new ExpandIri(InvalidFileId.apply) } diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileRejection.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileRejection.scala index 1763ee67fb..2466cfd2fb 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileRejection.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileRejection.scala @@ -88,6 +88,8 @@ object FileRejection { final case class InvalidFileId(id: String) extends FileRejection(s"File identifier '$id' cannot be expanded to an Iri.") + final case class InvalidProjectRef(ref: String) extends FileRejection(s"Project ref '$ref' is malformed.") + /** * Signals the impossibility to update a file when the digest is not computed * @@ -289,6 +291,7 @@ object FileRejection { case SaveRejection(_, _, SaveFileRejection.ResourceAlreadyExists(_)) => (StatusCodes.Conflict, Seq.empty) case FetchRejection(_, _, _) => (StatusCodes.InternalServerError, Seq.empty) case SaveRejection(_, _, _) => (StatusCodes.InternalServerError, Seq.empty) + case InvalidProjectRef(_) => (StatusCodes.BadRequest, Seq.empty) case _ => (StatusCodes.BadRequest, Seq.empty) } } diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala index e2366611d4..93f5583cb4 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala +++ 
b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala @@ -1,6 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.plugins.storage.files.routes -import akka.http.scaladsl.model.StatusCodes.Created +import akka.http.scaladsl.model.StatusCodes.{BadRequest, Created} import akka.http.scaladsl.model.Uri.Path import akka.http.scaladsl.model.headers.Accept import akka.http.scaladsl.model.{ContentType, MediaRange} @@ -8,10 +8,10 @@ import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server._ import cats.effect.IO import cats.syntax.all._ +import ch.epfl.bluebrain.nexus.delta.kernel.Logger import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection._ import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.{File, FileCopyDestination, FileId, FileRejection} import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.permissions.{read => Read, write => Write} -import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.routes.FilePutRequest import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.routes.FilesRoutes._ import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.{schemas, FileResource, Files} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StoragesConfig.StorageTypeConfig @@ -70,13 +70,15 @@ final class FilesRoutes( import baseUri.prefixSegment import schemeDirectives._ + private val log = Logger[FilesRoutes] + def routes: Route = (baseUriPrefix(baseUri.prefix) & replaceUri("files", schemas.files)) { pathPrefix("files") { extractCaller { implicit caller => - resolveProjectRef.apply { ref => + resolveProjectRef.apply { projectRef => implicit class IndexOps(io: IO[FileResource]) { - def index(m: IndexingMode): IO[FileResource] = io.flatTap(self.index(ref, _, m)) + def index(m: IndexingMode): IO[FileResource] = io.flatTap(self.index(projectRef, _, m)) } concat( @@ -90,7 +92,7 @@ final class FilesRoutes( emit( Created, files - .createLink(storage, ref, 
filename, mediaType, path, tag) + .createLink(storage, projectRef, filename, mediaType, path, tag) .index(mode) .attemptNarrow[FileRejection] ) @@ -99,79 +101,190 @@ final class FilesRoutes( extractRequestEntity { entity => emit( Created, - files.create(storage, ref, entity, tag).index(mode).attemptNarrow[FileRejection] + files.create(storage, projectRef, entity, tag).index(mode).attemptNarrow[FileRejection] ) } ) } }, (idSegment & indexingMode) { (id, mode) => - val fileId = FileId(id, ref) + val fileId = FileId(id, projectRef) concat( pathEndOrSingleSlash { operationName(s"$prefixSegment/files/{org}/{project}/{id}") { concat( (put & pathEndOrSingleSlash) { - filePutRequest(id, ref) { - case FilePutRequest.Create(storage, tag) => + concat( + parameters( + "sourceProject".as[String], + "sourceFile".as[IdSegment], + "sourceTag".as[UserTag].?, + "sourceRev".as[Int].?, + "destinationStorage".as[IdSegment].?, + "destinationTag".as[UserTag].? + ) { + case (sourceProj, sourceFile, sourceTag, sourceRev, destStorage, destTag) => { + log.info(s"RECEIVED IN ROUTES: $sourceProj").unsafeRunSync() + ProjectRef.parse(sourceProj) match { + case Left(_) => + emit(BadRequest, IO.pure(Left[FileRejection, Unit](InvalidProjectRef(sourceProj)))) + case Right(sourceProj) => + authorizeFor(projectRef, Read).apply { + val file: Option[FileId] = (sourceTag, sourceRev) match { + case (Some(tag), None) => Some(FileId(sourceFile, tag, sourceProj)) + case (None, Some(rev)) => Some(FileId(sourceFile, rev, sourceProj)) + case (None, None) => Some(FileId(sourceFile, sourceProj)) + case (Some(_), Some(_)) => None + } + file.fold( + emit( + BadRequest, + IO.pure( + Left[FileRejection, Unit]( + WrappedAkkaRejection(simultaneousTagAndRevRejection) + ) + ) + ) + ) { sourceFileId => + emit( + Created, + files + .copy( + sourceFileId, + FileCopyDestination(projectRef, Some(fileId), destStorage, destTag) + ) + .flatTap(index(projectRef, _, mode)) + .attemptNarrow[FileRejection] + .flatTap { + case 
Left(value) => log.info(s"Copying file failed with $value") + case Right(value) => log.info(s"Copying file succeeded with $value") + } + ) + } + + } + + } + } + + }, + parameters("rev".as[Int], "storage".as[IdSegment].?, "tag".as[UserTag].?) { + case (rev, storage, tag) => + concat( + // Update a Link + entity(as[LinkFile]) { case LinkFile(filename, mediaType, path) => + emit( + log.info(s"DTBDTB received update link operation") >> + files + .updateLink(fileId, storage, filename, mediaType, path, rev, tag) + .index(mode) + .attemptNarrow[FileRejection] + ) + }, + // Update a file + extractRequestEntity { entity => + emit( + log.info(s"DTBDTB received update operation") >> + files + .update(fileId, storage, rev, entity, tag) + .index(mode) + .attemptNarrow[FileRejection] + ) + } + ) + }, + parameters("storage".as[IdSegment].?, "tag".as[UserTag].?) { case (storage, tag) => concat( // Link a file with id segment entity(as[LinkFile]) { case LinkFile(filename, mediaType, path) => emit( Created, - files - .createLink(fileId, storage, filename, mediaType, path, tag) - .index(mode) - .attemptNarrow[FileRejection] + log.info(s"DTBDTB received createlink operation") >> + files + .createLink(fileId, storage, filename, mediaType, path, tag) + .index(mode) + .attemptNarrow[FileRejection] ) }, // Create a file with id segment extractRequestEntity { entity => emit( Created, - files - .create(fileId, storage, entity, tag) - .index(mode) - .attemptNarrow[FileRejection] + log.info(s"DTBDTB received create operation") >> + files + .create(fileId, storage, entity, tag) + .index(mode) + .attemptNarrow[FileRejection] ) } ) - case FilePutRequest.Update(rev, storage, tag) => - concat( - // Update a Link - entity(as[LinkFile]) { case LinkFile(filename, mediaType, path) => - emit( - files - .updateLink(fileId, storage, filename, mediaType, path, rev, tag) - .index(mode) - .attemptNarrow[FileRejection] - ) - }, - // Update a file - extractRequestEntity { entity => - emit( - files - 
.update(fileId, storage, rev, entity, tag) - .index(mode) - .attemptNarrow[FileRejection] - ) - } - ) - case FilePutRequest.Copy(sourceFile, destination) => - authorizeFor(ref, Read).apply { - emit( - Created, - files - .copy(sourceFile, destination) - .flatTap(index(destination.project, _, mode)) - .attemptNarrow[FileRejection] - ) - } - } + } + ) +// filePutRequest(id, ref) { +// case c@FilePutRequest.Create(storage, tag) => +// concat( +// // Link a file with id segment +// entity(as[LinkFile]) { case LinkFile(filename, mediaType, path) => +// emit( +// Created, +// log.info(s"DTBDTB received createlink operation $c") >> +// files +// .createLink(fileId, storage, filename, mediaType, path, tag) +// .index(mode) +// .attemptNarrow[FileRejection] +// ) +// }, +// // Create a file with id segment +// extractRequestEntity { entity => +// emit( +// Created, +// log.info(s"DTBDTB received create operation $c") >> +// files +// .create(fileId, storage, entity, tag) +// .index(mode) +// .attemptNarrow[FileRejection] +// ) +// } +// ) +// case u@FilePutRequest.Update(rev, storage, tag) => +// concat( +// // Update a Link +// entity(as[LinkFile]) { case LinkFile(filename, mediaType, path) => +// emit( +// log.info(s"DTBDTB received update link operation $u") >> +// files +// .updateLink(fileId, storage, filename, mediaType, path, rev, tag) +// .index(mode) +// .attemptNarrow[FileRejection] +// ) +// }, +// // Update a file +// extractRequestEntity { entity => +// emit( +// log.info(s"DTBDTB received update operation $u") >> +// files +// .update(fileId, storage, rev, entity, tag) +// .index(mode) +// .attemptNarrow[FileRejection] +// ) +// } +// ) +// case c@FilePutRequest.Copy(sourceFile, destination) => +// authorizeFor(ref, Read).apply { +// emit( +// Created, +// log.info(s"DTBDTB received copy operation $c") >> +// files +// .copy(sourceFile, destination) +// .flatTap(index(destination.project, _, mode)) +// .attemptNarrow[FileRejection] +// ) +// } +// } }, // 
Deprecate a file (delete & parameter("rev".as[Int])) { rev => - authorizeFor(ref, Write).apply { + authorizeFor(projectRef, Write).apply { emit( files .deprecate(fileId, rev) @@ -184,7 +297,7 @@ final class FilesRoutes( // Fetch a file (get & idSegmentRef(id)) { id => - emitOrFusionRedirect(ref, id, fetch(FileId(id, ref))) + emitOrFusionRedirect(projectRef, id, fetch(FileId(id, projectRef))) } ) } @@ -193,9 +306,9 @@ final class FilesRoutes( operationName(s"$prefixSegment/files/{org}/{project}/{id}/tags") { concat( // Fetch a file tags - (get & idSegmentRef(id) & pathEndOrSingleSlash & authorizeFor(ref, Read)) { id => + (get & idSegmentRef(id) & pathEndOrSingleSlash & authorizeFor(projectRef, Read)) { id => emit( - fetchMetadata(FileId(id, ref)) + fetchMetadata(FileId(id, projectRef)) .map(_.value.tags) .attemptNarrow[FileRejection] .rejectOn[FileNotFound] @@ -203,7 +316,7 @@ final class FilesRoutes( }, // Tag a file (post & parameter("rev".as[Int]) & pathEndOrSingleSlash) { rev => - authorizeFor(ref, Write).apply { + authorizeFor(projectRef, Write).apply { entity(as[Tag]) { case Tag(tagRev, tag) => emit( Created, @@ -214,7 +327,7 @@ final class FilesRoutes( }, // Delete a tag (tagLabel & delete & parameter("rev".as[Int]) & pathEndOrSingleSlash & authorizeFor( - ref, + projectRef, Write )) { (tag, rev) => emit( @@ -229,7 +342,7 @@ final class FilesRoutes( } }, (pathPrefix("undeprecate") & put & parameter("rev".as[Int])) { rev => - authorizeFor(ref, Write).apply { + authorizeFor(projectRef, Write).apply { emit( files .undeprecate(fileId, rev) @@ -247,30 +360,6 @@ final class FilesRoutes( } } - private def filePutRequest(id: IdSegment, proj: ProjectRef): Directive1[FilePutRequest] = - copyFileOperation(id, proj) | updateFileOperation | createFileOperation - - private def copyFileOperation(id: IdSegment, proj: ProjectRef): Directive1[FilePutRequest] = - (idSegmentRef(id) & parameters( - "destinationProject".as[ProjectRef], - "destinationStorage".as[IdSegment].?, - 
"destinationTag".as[UserTag].? - )) - .tmap[FilePutRequest] { case (idRef, destProj, destStorage, destTag) => - FilePutRequest.Copy(FileId(idRef, proj), FileCopyDestination(destProj, destStorage, destTag)) - } - - private def updateFileOperation: Directive1[FilePutRequest] = - parameters("rev".as[Int], "storage".as[IdSegment].?, "tag".as[UserTag].?).tmap[FilePutRequest] { - case (rev, storage, tag) => - FilePutRequest.Update(rev, storage, tag) - } - - private def createFileOperation: Directive1[FilePutRequest] = - parameters("storage".as[IdSegment].?, "tag".as[UserTag].?).tmap[FilePutRequest] { case (storage, tag) => - FilePutRequest.Create(storage, tag) - } - def fetch(id: FileId)(implicit caller: Caller): Route = (headerValueByType(Accept) & varyAcceptHeaders) { case accept if accept.mediaRanges.exists(metadataMediaRanges.contains) => diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/HttpClient.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/HttpClient.scala index 5b1dbf48ad..3de1715645 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/HttpClient.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/HttpClient.scala @@ -60,6 +60,11 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit )(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = requestAssert(PUT, url, Some(body), identity, extraHeaders)(assertResponse) + def putAndReturn[A](url: String, identity: Identity, extraHeaders: Seq[HttpHeader] = jsonHeaders)( + assertResponse: (A, HttpResponse) => (A, Assertion) + )(implicit um: FromEntityUnmarshaller[A]): IO[A] = + requestAssertAndReturn(PUT, url, None, identity, extraHeaders)(assertResponse).map(_._1) + def putAttachmentFromPath[A]( url: String, path: Path, @@ -139,25 +144,25 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit )(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = requestAssert(DELETE, url, None, identity, extraHeaders)(assertResponse) - def 
requestAssert[A]( + def requestAssertAndReturn[A]( method: HttpMethod, url: String, body: Option[Json], identity: Identity, extraHeaders: Seq[HttpHeader] = jsonHeaders - )(assertResponse: (A, HttpResponse) => Assertion)(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = { + )(assertResponse: (A, HttpResponse) => (A, Assertion))(implicit um: FromEntityUnmarshaller[A]): IO[(A, Assertion)] = { def buildClue(a: A, response: HttpResponse) = s""" - |Endpoint: ${method.value} $url - |Identity: $identity - |Token: ${Option(tokensMap.get(identity)).map(_.credentials.token()).getOrElse("None")} - |Status code: ${response.status} - |Body: ${body.getOrElse("None")} - |Response: - |$a - |""".stripMargin - - requestJson( + |Endpoint: ${method.value} $url + |Identity: $identity + |Token: ${Option(tokensMap.get(identity)).map(_.credentials.token()).getOrElse("None")} + |Status code: ${response.status} + |Body: ${body.getOrElse("None")} + |Response: + |$a + |""".stripMargin + + requestJson[A, (A, Assertion)]( method, url, body, @@ -167,6 +172,17 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit ) } + def requestAssert[A]( + method: HttpMethod, + url: String, + body: Option[Json], + identity: Identity, + extraHeaders: Seq[HttpHeader] = jsonHeaders + )(assertResponse: (A, HttpResponse) => Assertion)(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = + requestAssertAndReturn[A](method, url, body, identity, extraHeaders) { (a, resp) => + (a, assertResponse(a, resp)) + }.map(_._2) + def sparqlQuery[A](url: String, query: String, identity: Identity, extraHeaders: Seq[HttpHeader] = Nil)( assertResponse: (A, HttpResponse) => Assertion )(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = { diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/Optics.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/Optics.scala index 1763ebb0d3..ae2154eead 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/Optics.scala +++ 
b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/Optics.scala @@ -60,7 +60,8 @@ object Optics extends Optics { val `@type` = root.`@type`.string val _uuid = root._uuid.string - val _total = root._total.long + val _total = root._total.long + val _filename = root._filename.string val hits = root.hits.hits val totalHits = root.hits.total.value.int diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CopyFileSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CopyFileSpec.scala new file mode 100644 index 0000000000..0714e646e2 --- /dev/null +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CopyFileSpec.scala @@ -0,0 +1,49 @@ +package ch.epfl.bluebrain.nexus.tests.kg + +import akka.http.scaladsl.model._ +import akka.util.ByteString +import cats.effect.IO +import cats.implicits.catsSyntaxFlatMapOps +import ch.epfl.bluebrain.nexus.delta.kernel.utils.UrlUtils +import ch.epfl.bluebrain.nexus.tests.HttpClient._ +import ch.epfl.bluebrain.nexus.tests.Identity.storages.Coyote +import ch.epfl.bluebrain.nexus.tests.Optics +import io.circe.Json +import org.scalatest.Assertion + +trait CopyFileSpec { self: StorageSpec => + + "Copying a json file to a different organization" should { + + val jsonFileContent = """{ "updated": ["is", "a", "test", "file"] }""" + + def givenAProjectWithStorage(test: String => IO[Assertion]): IO[Assertion] = { + val (proj, org) = (genId(), genId()) + val projRef = s"$org/$proj" + createProjects(Coyote, org, proj) >> + createStorages(projRef) >> + test(projRef) + } + + "succeed" in { + givenAProjectWithStorage { destProjRef => + val sourceFileId = "attachment.json" + val destFileId = "attachment2.json" + val uri = + s"/files/$destProjRef/$destFileId?sourceProject=$projectRef&sourceFile=$sourceFileId&destinationStorage=nxv:$storageId" + for { + json <- deltaClient.putAndReturn[Json](uri, Coyote) { (json, response) => + (json, expectCreated(json, response)) + } + _ = println(json) + _ = 
Optics._filename.getOption(json) shouldBe Some(sourceFileId) + returnedId = Optics.`@id`.getOption(json).getOrElse(fail("could not find @id of created resource")) + assertion <- + deltaClient.get[ByteString](s"/files/$destProjRef/${UrlUtils.encode(returnedId)}", Coyote, acceptAll) { + expectDownload("attachment.json", ContentTypes.`application/json`, jsonFileContent) + } + } yield assertion + } + } + } +} diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/DiskStorageSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/DiskStorageSpec.scala index bdc4007d7a..f3f56d01e1 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/DiskStorageSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/DiskStorageSpec.scala @@ -8,7 +8,7 @@ import ch.epfl.bluebrain.nexus.tests.iam.types.Permission import io.circe.Json import org.scalatest.Assertion -class DiskStorageSpec extends StorageSpec { +class DiskStorageSpec extends StorageSpec with CopyFileSpec { override def storageName: String = "disk" @@ -32,7 +32,7 @@ class DiskStorageSpec extends StorageSpec { ): _* ) - override def createStorages: IO[Assertion] = { + override def createStorages(projectRef: String): IO[Assertion] = { val payload = jsonContentOf("/kg/storages/disk.json") val payload2 = jsonContentOf("/kg/storages/disk-perms.json") diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/RemoteStorageSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/RemoteStorageSpec.scala index 4bf6b3ab60..ad93d53f4f 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/RemoteStorageSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/RemoteStorageSpec.scala @@ -17,7 +17,7 @@ import org.scalatest.Assertion import scala.annotation.nowarn import scala.sys.process._ -class RemoteStorageSpec extends StorageSpec { +class RemoteStorageSpec extends StorageSpec with CopyFileSpec { override def storageName: String = "external" @@ 
-60,7 +60,7 @@ class RemoteStorageSpec extends StorageSpec { ): _* ) - override def createStorages: IO[Assertion] = { + override def createStorages(projectRef: String): IO[Assertion] = { val payload = jsonContentOf( "/kg/storages/remote-disk.json", "endpoint" -> externalEndpoint, diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/S3StorageSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/S3StorageSpec.scala index cd864ea7f6..8d76a09be3 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/S3StorageSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/S3StorageSpec.scala @@ -82,7 +82,7 @@ class S3StorageSpec extends StorageSpec { ): _* ) - override def createStorages: IO[Assertion] = { + override def createStorages(projectRef: String): IO[Assertion] = { val payload = jsonContentOf( "/kg/storages/s3.json", "storageId" -> s"https://bluebrain.github.io/nexus/vocabulary/$storageId", diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/StorageSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/StorageSpec.scala index 023a1e1fa3..aa2ae05569 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/StorageSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/StorageSpec.scala @@ -39,7 +39,7 @@ abstract class StorageSpec extends BaseIntegrationSpec { def locationPrefix: Option[String] - def createStorages: IO[Assertion] + def createStorages(projectRef: String): IO[Assertion] protected def fileSelf(project: String, id: String): String = { val uri = Uri(s"${config.deltaUri}/files/$project") @@ -55,7 +55,7 @@ abstract class StorageSpec extends BaseIntegrationSpec { "Creating a storage" should { s"succeed for a $storageName storage" in { - createStorages + createStorages(projectRef) } "wait for storages to be indexed" in { @@ -424,7 +424,7 @@ abstract class StorageSpec extends BaseIntegrationSpec { s"=?UTF-8?B?$encodedFilename?=" } - private def expectDownload( 
+ protected def expectDownload( expectedFilename: String, expectedContentType: ContentType, expectedContent: String,