diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/ProjectsModule.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/ProjectsModule.scala index de50996460..bb002ee7ee 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/ProjectsModule.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/ProjectsModule.scala @@ -24,14 +24,13 @@ import ch.epfl.bluebrain.nexus.delta.sdk.organizations.model.OrganizationRejecti import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContext.ContextRejection import ch.epfl.bluebrain.nexus.delta.sdk.projects._ import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ProjectRejection.WrappedOrganizationRejection -import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.{ApiMappings, Project, ProjectEvent, ProjectRejection} +import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.{ApiMappings, Project, ProjectEvent} import ch.epfl.bluebrain.nexus.delta.sdk.provisioning.ProjectProvisioning import ch.epfl.bluebrain.nexus.delta.sdk.quotas.Quotas import ch.epfl.bluebrain.nexus.delta.sdk.sse.SseEncoder import ch.epfl.bluebrain.nexus.delta.sourcing.Transactors import ch.epfl.bluebrain.nexus.delta.sourcing.stream.Supervisor import izumi.distage.model.definition.{Id, ModuleDef} -import monix.execution.Scheduler /** * Projects wiring @@ -67,8 +66,7 @@ object ProjectsModule extends ModuleDef { .fetchActiveOrganization(_) .adaptError { case e: OrganizationRejection => WrappedOrganizationRejection(e) - } - .toBIO[ProjectRejection], + }, ValidateProjectDeletion(xas, config.projects.deletion.enabled), scopeInitializations, mappings.merge, @@ -120,12 +118,11 @@ object ProjectsModule extends ModuleDef { } make[UUIDCache].fromEffect { (config: AppConfig, xas: Transactors) => - toCatsIO(UUIDCache(config.projects.cache, config.organizations.cache, xas)) + UUIDCache(config.projects.cache, config.organizations.cache, xas) } - make[DeltaSchemeDirectives].from { - (fetchContext: 
FetchContext[ContextRejection], uuidCache: UUIDCache, s: Scheduler) => - DeltaSchemeDirectives(fetchContext, uuidCache)(s) + make[DeltaSchemeDirectives].from { (fetchContext: FetchContext[ContextRejection], uuidCache: UUIDCache) => + DeltaSchemeDirectives(fetchContext, uuidCache) } make[ProjectsRoutes].from { diff --git a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ElemRoutesSpec.scala b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ElemRoutesSpec.scala index ff76486bc7..614d781101 100644 --- a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ElemRoutesSpec.scala +++ b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ElemRoutesSpec.scala @@ -20,7 +20,7 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.offset.Offset import ch.epfl.bluebrain.nexus.delta.sourcing.query.SelectFilter import ch.epfl.bluebrain.nexus.delta.sourcing.stream.RemainingElems import ch.epfl.bluebrain.nexus.testkit.CirceLiteral -import ch.epfl.bluebrain.nexus.testkit.bio.IOFromMap +import ch.epfl.bluebrain.nexus.testkit.ce.IOFromMap import fs2.Stream import java.time.Instant diff --git a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/EventsRoutesSpec.scala b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/EventsRoutesSpec.scala index 5553c63ea5..5f2e3bd2f8 100644 --- a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/EventsRoutesSpec.scala +++ b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/EventsRoutesSpec.scala @@ -20,7 +20,7 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, Authent import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} import ch.epfl.bluebrain.nexus.delta.sourcing.offset.Offset import ch.epfl.bluebrain.nexus.delta.sourcing.offset.Offset.{At, Start} -import ch.epfl.bluebrain.nexus.testkit.bio.IOFromMap +import ch.epfl.bluebrain.nexus.testkit.ce.IOFromMap import fs2.Stream import java.util.UUID diff --git 
a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/OrganizationsRoutesSpec.scala b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/OrganizationsRoutesSpec.scala index c07f8bcda1..4025abe928 100644 --- a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/OrganizationsRoutesSpec.scala +++ b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/OrganizationsRoutesSpec.scala @@ -20,7 +20,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.projects.OwnerPermissionsScopeInitializ import ch.epfl.bluebrain.nexus.delta.sdk.utils.BaseRouteSpec import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, Authenticated, Group, Subject} import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label -import ch.epfl.bluebrain.nexus.testkit.bio.IOFromMap +import ch.epfl.bluebrain.nexus.testkit.ce.IOFromMap import io.circe.Json import java.util.UUID diff --git a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ProjectsRoutesSpec.scala b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ProjectsRoutesSpec.scala index 371506797d..2f11358f79 100644 --- a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ProjectsRoutesSpec.scala +++ b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ProjectsRoutesSpec.scala @@ -23,7 +23,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.provisioning.{AutomaticProvisioningConf import ch.epfl.bluebrain.nexus.delta.sdk.utils.BaseRouteSpec import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, Authenticated, Group, Subject, User} import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} -import ch.epfl.bluebrain.nexus.testkit.bio.IOFromMap +import ch.epfl.bluebrain.nexus.testkit.ce.IOFromMap import io.circe.Json import monix.bio.{IO, UIO} import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ diff --git a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutesSpec.scala 
b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutesSpec.scala index 49d2e1a60f..be37a3e557 100644 --- a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutesSpec.scala +++ b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutesSpec.scala @@ -32,7 +32,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.utils.BaseRouteSpec import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, Authenticated, Group, Subject, User} import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag import ch.epfl.bluebrain.nexus.delta.sourcing.model.{ProjectRef, ResourceRef} -import ch.epfl.bluebrain.nexus.testkit.bio.IOFromMap +import ch.epfl.bluebrain.nexus.testkit.ce.IOFromMap import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsIOValues import io.circe.{Json, Printer} import org.scalatest.Assertion diff --git a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/SchemasRoutesSpec.scala b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/SchemasRoutesSpec.scala index 86ab05435c..00ff642eb3 100644 --- a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/SchemasRoutesSpec.scala +++ b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/SchemasRoutesSpec.scala @@ -28,7 +28,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.schemas.{SchemaImports, SchemasConfig, import ch.epfl.bluebrain.nexus.delta.sdk.utils.BaseRouteSpec import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, Authenticated, Group, Subject} import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef -import ch.epfl.bluebrain.nexus.testkit.bio.IOFromMap +import ch.epfl.bluebrain.nexus.testkit.ce.IOFromMap import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsIOValues import io.circe.Json diff --git a/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/utils/ClasspathResourceUtils.scala 
b/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/utils/ClasspathResourceUtils.scala index d51c30cc23..3ee2eefc20 100644 --- a/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/utils/ClasspathResourceUtils.scala +++ b/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/utils/ClasspathResourceUtils.scala @@ -90,15 +90,6 @@ trait ClasspathResourceUtils { props.asScala.toMap } - final def bioPropertiesOf(resourcePath: String)(implicit - classLoader: ClassLoader - ): BIO[ClasspathResourceError, Map[String, String]] = - bioStreamOf(resourcePath).map { is => - val props = new Properties() - props.load(is) - props.asScala.toMap - } - /** * Loads the content of the argument classpath resource as a string and replaces all the key matches of the * ''replacements'' with their values. The resulting string is parsed into a json value. diff --git a/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/utils/IOUtils.scala b/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/utils/IOUtils.scala index fd4b9dd7ce..d434bdebc2 100644 --- a/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/utils/IOUtils.scala +++ b/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/utils/IOUtils.scala @@ -7,6 +7,7 @@ import monix.bio.{Task, UIO} import java.time.Instant import java.util.UUID import java.util.concurrent.TimeUnit +import scala.concurrent.duration.{DurationLong, FiniteDuration, MILLISECONDS} trait IOUtils { @@ -22,6 +23,13 @@ object IOUtils extends IOUtils trait IOInstant { def now(implicit clock: Clock[IO]): IO[Instant] = clock.realTime(TimeUnit.MILLISECONDS).map(Instant.ofEpochMilli) + + def timed[A](io: IO[A])(implicit c: Clock[IO]): IO[(A, FiniteDuration)] = + for { + start <- c.monotonic(MILLISECONDS) + result <- io + finish <- c.monotonic(MILLISECONDS) + } yield (result, (finish - start).millis) } object IOInstant extends IOInstant diff --git 
a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/Archives.scala b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/Archives.scala index 12822dc8b7..bdaaea248e 100644 --- a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/Archives.scala +++ b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/Archives.scala @@ -39,8 +39,6 @@ import io.circe.Json * a source decoder for [[ArchiveValue]] * @param config * the log config - * @param uuidF - * the uuid generator * @param rcr * the archive remote context resolution */ @@ -82,7 +80,7 @@ class Archives( */ def create(project: ProjectRef, source: Json)(implicit subject: Subject): IO[ArchiveResource] = (for { - p <- toCatsIO(fetchContext.onRead(project)) + p <- fetchContext.onRead(project) (iri, value) <- toCatsIO(sourceDecoder(p, source)) res <- create(iri, project, value) } yield res).span("createArchive") @@ -149,12 +147,11 @@ class Archives( source <- archiveDownload(value.value, project, ignoreNotFound) } yield source).span("downloadArchive") - private def expandWithContext(id: IdSegment, project: ProjectRef) = toCatsIO { + private def expandWithContext(id: IdSegment, project: ProjectRef) = for { p <- fetchContext.onRead(project) iri <- expandIri(id, p) } yield (iri, p) - } private def eval(cmd: CreateArchive): IO[ArchiveResource] = log.evaluate(cmd.project, cmd.id, cmd).map { _.toResource(config.ttl) } diff --git a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/FileSelf.scala b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/FileSelf.scala index a1ac940d44..231f27e7e8 100644 --- a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/FileSelf.scala +++ b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/FileSelf.scala @@ -3,7 +3,6 @@ package 
ch.epfl.bluebrain.nexus.delta.plugins.archive import akka.http.scaladsl.model.Uri import cats.effect.IO import cats.syntax.all._ -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.kernel.utils.UrlUtils import ch.epfl.bluebrain.nexus.delta.plugins.archive.FileSelf.ParsingError._ import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri @@ -101,9 +100,7 @@ object FileSelf { case Array(org, project, id) => for { project <- IO.fromEither(ProjectRef.parse(org, project).leftMap(_ => InvalidProject(self))) - projectContext <- toCatsIO( - fetchContext.onRead(project).mapError { _ => InvalidProjectContext(self, project) } - ) + projectContext <- fetchContext.onRead(project).adaptError { _ => InvalidProjectContext(self, project) } decodedId = UrlUtils.decode(id) iriOption = IdSegment(decodedId).toIri(projectContext.apiMappings, projectContext.base).map(ResourceRef(_)) diff --git a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphDecoderConfiguration.scala b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphDecoderConfiguration.scala index c6aaea284f..289536bf63 100644 --- a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphDecoderConfiguration.scala +++ b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphDecoderConfiguration.scala @@ -1,16 +1,17 @@ package ch.epfl.bluebrain.nexus.delta.plugins.blazegraph +import cats.effect.IO +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration.toCatsIOOps import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.model.{contexts, BlazegraphViewType} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.JsonLdApi import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, JsonLdContext, RemoteContextResolution} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.decoder.Configuration 
-import monix.bio.Task private[blazegraph] object BlazegraphDecoderConfiguration { - def apply(implicit jsonLdApi: JsonLdApi, rcr: RemoteContextResolution): Task[Configuration] = for { - contextValue <- Task.delay { ContextValue(contexts.blazegraph) } - jsonLdContext <- JsonLdContext(contextValue) + def apply(implicit jsonLdApi: JsonLdApi, rcr: RemoteContextResolution): IO[Configuration] = for { + contextValue <- IO.delay { ContextValue(contexts.blazegraph) } + jsonLdContext <- JsonLdContext(contextValue).toCatsIO } yield { val enhancedJsonLdContext = jsonLdContext .addAliasIdType("IndexingBlazegraphViewValue", BlazegraphViewType.IndexingBlazegraphView.tpe) diff --git a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphPluginModule.scala b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphPluginModule.scala index 3bd7afec69..301e074acc 100644 --- a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphPluginModule.scala +++ b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphPluginModule.scala @@ -3,12 +3,12 @@ package ch.epfl.bluebrain.nexus.delta.plugins.blazegraph import akka.actor.typed.ActorSystem import cats.effect.{Clock, ContextShift, IO, Timer} import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ -import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF +import ch.epfl.bluebrain.nexus.delta.kernel.utils.{CatsEffectsClasspathResourceUtils, UUIDF} import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.BlazegraphClient import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.config.BlazegraphViewsConfig import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.indexing.BlazegraphCoordinator import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.model.BlazegraphViewRejection.ProjectContextRejection -import 
ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.model.{contexts, schema => viewsSchemaId, BlazegraphView, BlazegraphViewEvent} +import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.model.{contexts, schema => viewsSchemaId, BlazegraphView, BlazegraphViewEvent, DefaultProperties} import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.routes.{BlazegraphViewsIndexingRoutes, BlazegraphViewsRoutes, BlazegraphViewsRoutesHandler} import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.slowqueries.{BlazegraphSlowQueryDeleter, BlazegraphSlowQueryLogger, BlazegraphSlowQueryStore} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.JsonLdApi @@ -48,6 +48,10 @@ class BlazegraphPluginModule(priority: Int) extends ModuleDef { make[BlazegraphViewsConfig].from { BlazegraphViewsConfig.load(_) } + make[DefaultProperties].fromEffect { + CatsEffectsClasspathResourceUtils.ioPropertiesOf("blazegraph/index.properties").map(DefaultProperties) + } + make[HttpClient].named("http-indexing-client").from { (cfg: BlazegraphViewsConfig, as: ActorSystem[Nothing], sc: Scheduler) => HttpClient()(cfg.indexingClient, as.classicSystem, sc) @@ -69,17 +73,18 @@ class BlazegraphPluginModule(priority: Int) extends ModuleDef { )(timer) } - make[BlazegraphSlowQueryLogger].from { (cfg: BlazegraphViewsConfig, store: BlazegraphSlowQueryStore) => - BlazegraphSlowQueryLogger(store, cfg.slowQueries.slowQueryThreshold) + make[BlazegraphSlowQueryLogger].from { (cfg: BlazegraphViewsConfig, store: BlazegraphSlowQueryStore, c: Clock[IO]) => + BlazegraphSlowQueryLogger(store, cfg.slowQueries.slowQueryThreshold)(c) } make[BlazegraphClient].named("blazegraph-indexing-client").from { ( cfg: BlazegraphViewsConfig, client: HttpClient @Id("http-indexing-client"), - as: ActorSystem[Nothing] + as: ActorSystem[Nothing], + properties: DefaultProperties ) => - BlazegraphClient(client, cfg.base, cfg.credentials, cfg.queryTimeout)(as.classicSystem) + BlazegraphClient(client, cfg.base, cfg.credentials, cfg.queryTimeout, 
properties.value)(as.classicSystem) } make[HttpClient].named("http-query-client").from { @@ -91,9 +96,10 @@ class BlazegraphPluginModule(priority: Int) extends ModuleDef { ( cfg: BlazegraphViewsConfig, client: HttpClient @Id("http-query-client"), - as: ActorSystem[Nothing] + as: ActorSystem[Nothing], + properties: DefaultProperties ) => - BlazegraphClient(client, cfg.base, cfg.credentials, cfg.queryTimeout)(as.classicSystem) + BlazegraphClient(client, cfg.base, cfg.credentials, cfg.queryTimeout, properties.value)(as.classicSystem) } make[ValidateBlazegraphView].from { diff --git a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphServiceDependency.scala b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphServiceDependency.scala index f2da3575d2..a5ee8d36c6 100644 --- a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphServiceDependency.scala +++ b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphServiceDependency.scala @@ -1,7 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.plugins.blazegraph import cats.effect.IO -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.BlazegraphClient import ch.epfl.bluebrain.nexus.delta.sdk.ServiceDependency import ch.epfl.bluebrain.nexus.delta.sdk.model.ComponentDescription.ServiceDescription diff --git a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphViews.scala b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphViews.scala index dfe78b24bf..a16c886012 100644 --- a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphViews.scala +++ 
b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphViews.scala @@ -6,7 +6,7 @@ import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.kernel.kamon.KamonMetricComponent import ch.epfl.bluebrain.nexus.delta.kernel.utils.{IOInstant, UUIDF} import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.BlazegraphViews._ -import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.BlazegraphClient +import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.{BlazegraphClient, SparqlClientError} import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.indexing.IndexingViewDef import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.indexing.IndexingViewDef.{ActiveViewDef, DeprecatedViewDef} import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.model.BlazegraphView.IndexingBlazegraphView @@ -63,7 +63,7 @@ final class BlazegraphViews( */ def create(project: ProjectRef, source: Json)(implicit caller: Caller): IO[ViewResource] = { for { - pc <- fetchContext.onCreate(project).toCatsIO + pc <- fetchContext.onCreate(project) (iri, viewValue) <- sourceDecoder(project, pc, source).toCatsIO res <- eval(CreateBlazegraphView(iri, project, viewValue, source, caller.subject)) _ <- createNamespace(res) @@ -86,8 +86,8 @@ final class BlazegraphViews( source: Json )(implicit caller: Caller): IO[ViewResource] = { for { - pc <- fetchContext.onCreate(project).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onCreate(project) + iri <- expandIri(id, pc) viewValue <- sourceDecoder(project, pc, iri, source).toCatsIO res <- eval(CreateBlazegraphView(iri, project, viewValue, source, caller.subject)) _ <- createNamespace(res) @@ -107,8 +107,8 @@ final class BlazegraphViews( subject: Subject ): IO[ViewResource] = { for { - pc <- fetchContext.onCreate(project).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onCreate(project) + iri <- expandIri(id, pc) source = 
view.toJson(iri) res <- eval(CreateBlazegraphView(iri, project, view, source, subject)) _ <- createNamespace(res) @@ -133,8 +133,8 @@ final class BlazegraphViews( source: Json )(implicit caller: Caller): IO[ViewResource] = { for { - pc <- fetchContext.onModify(project).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onModify(project) + iri <- expandIri(id, pc) viewValue <- sourceDecoder(project, pc, iri, source).toCatsIO res <- eval(UpdateBlazegraphView(iri, project, viewValue, rev, source, caller.subject)) _ <- createNamespace(res) @@ -157,8 +157,8 @@ final class BlazegraphViews( subject: Subject ): IO[ViewResource] = { for { - pc <- fetchContext.onModify(project).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onModify(project) + iri <- expandIri(id, pc) source = view.toJson(iri) res <- eval(UpdateBlazegraphView(iri, project, view, rev, source, subject)) _ <- createNamespace(res) @@ -187,8 +187,8 @@ final class BlazegraphViews( rev: Int )(implicit subject: Subject): IO[ViewResource] = { for { - pc <- fetchContext.onModify(project).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onModify(project) + iri <- expandIri(id, pc) res <- eval(TagBlazegraphView(iri, project, tagRev, tag, rev, subject)) _ <- createNamespace(res) } yield res @@ -210,8 +210,8 @@ final class BlazegraphViews( rev: Int )(implicit subject: Subject): IO[ViewResource] = { for { - pc <- fetchContext.onModify(project).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onModify(project) + iri <- expandIri(id, pc) res <- eval(DeprecateBlazegraphView(iri, project, rev, subject)) } yield res }.span("deprecateBlazegraphView") @@ -247,8 +247,8 @@ final class BlazegraphViews( project: ProjectRef ): IO[BlazegraphViewState] = { for { - pc <- fetchContext.onRead(project).toCatsIO - iri <- expandIri(id.value, pc).toCatsIO + pc <- fetchContext.onRead(project) + iri <- expandIri(id.value, pc) notFound = ViewNotFound(iri, project) state <- id 
match { case Latest(_) => log.stateOr(project, iri, notFound) @@ -542,9 +542,8 @@ object BlazegraphViews { case i: IndexingBlazegraphView => client .createNamespace(BlazegraphViews.namespace(i, prefix)) - .mapError(WrappedBlazegraphClientError.apply) + .adaptError { case e: SparqlClientError => WrappedBlazegraphClientError(e) } .void - .toCatsIO case _ => IO.unit } apply(fetchContext, contextResolution, validate, createNameSpace, eventLogConfig, prefix, xas) diff --git a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphViewsQuery.scala b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphViewsQuery.scala index af2e5adad9..0fbd2ffa65 100644 --- a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphViewsQuery.scala +++ b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphViewsQuery.scala @@ -1,6 +1,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.blazegraph import cats.effect.IO +import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.kernel.search.Pagination.FromPagination import ch.epfl.bluebrain.nexus.delta.kernel.utils.ClasspathResourceUtils.ioContentOf @@ -143,8 +144,8 @@ object BlazegraphViewsQuery { base: BaseUri ): IO[SearchResults[SparqlLink]] = for { - p <- fetchContext.onRead(projectRef).toCatsIO - iri <- expandIri(id, p).toCatsIO + p <- fetchContext.onRead(projectRef) + iri <- expandIri(id, p) q = SparqlQuery(replace(incomingQuery, iri, pagination)) bindings <- query(IriSegment(defaultViewId), projectRef, q, SparqlResultsJson) links = toSparqlLinks(bindings.value) @@ -157,8 +158,8 @@ object BlazegraphViewsQuery { includeExternalLinks: Boolean )(implicit caller: Caller, base: BaseUri): IO[SearchResults[SparqlLink]] = for { - p <- fetchContext.onRead(projectRef).toCatsIO - iri <- expandIri(id, 
p).toCatsIO + p <- fetchContext.onRead(projectRef) + iri <- expandIri(id, p) queryTemplate = if (includeExternalLinks) outgoingWithExternalQuery else outgoingScopedQuery q = SparqlQuery(replace(queryTemplate, iri, pagination)) bindings <- query(IriSegment(defaultViewId), projectRef, q, SparqlResultsJson) @@ -182,7 +183,6 @@ object BlazegraphViewsQuery { AuthorizationFailed(i.ref.project, i.permission) ) .as(Set(i.index)) - .toBIO[BlazegraphViewRejection] case a: AggregateView => aclCheck .mapFilter[IndexingView, String]( @@ -190,11 +190,12 @@ object BlazegraphViewsQuery { v => ProjectAcl(v.ref.project) -> v.permission, _.index ) - .toUIO } qr <- logSlowQueries( BlazegraphQueryContext(ViewRef.apply(project, iri), query, caller.subject), - client.query(indices, query, responseType).mapError(WrappedBlazegraphClientError) + client.query(indices, query, responseType).adaptError { case e: SparqlClientError => + WrappedBlazegraphClientError(e) + } ) } yield qr diff --git a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/client/BlazegraphClient.scala b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/client/BlazegraphClient.scala index 8573f45db4..93d94c9dd5 100644 --- a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/client/BlazegraphClient.scala +++ b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/client/BlazegraphClient.scala @@ -7,19 +7,20 @@ import akka.http.scaladsl.model.headers.{BasicHttpCredentials, HttpCredentials, import akka.http.scaladsl.model.{HttpEntity, HttpHeader, Uri} import akka.http.scaladsl.unmarshalling.FromEntityUnmarshaller import akka.http.scaladsl.unmarshalling.PredefinedFromEntityUnmarshallers.stringUnmarshaller -import ch.epfl.bluebrain.nexus.delta.kernel.utils.ClasspathResourceUtils +import cats.effect.IO +import cats.syntax.all._ +import 
ch.epfl.bluebrain.nexus.delta.kernel.effect.migration.toCatsIOOps import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.BlazegraphClient.timeoutHeader import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.SparqlClientError.{InvalidCountRequest, WrappedHttpClientError} import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.SparqlQueryResponseType.{Aux, SparqlResultsJson} import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.config.BlazegraphViewsConfig.Credentials import ch.epfl.bluebrain.nexus.delta.rdf.query.SparqlQuery import ch.epfl.bluebrain.nexus.delta.rdf.query.SparqlQuery.SparqlConstructQuery -import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClient +import ch.epfl.bluebrain.nexus.delta.sdk.http.{HttpClient, HttpClientError} import ch.epfl.bluebrain.nexus.delta.sdk.model.ComponentDescription.ServiceDescription import ch.epfl.bluebrain.nexus.delta.sdk.model.ComponentDescription.ServiceDescription.ResolvedServiceDescription import ch.epfl.bluebrain.nexus.delta.sdk.model.Name import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ -import monix.bio.{IO, UIO} import scala.concurrent.duration._ @@ -29,24 +30,20 @@ import scala.concurrent.duration._ class BlazegraphClient( client: HttpClient, endpoint: Uri, - queryTimeout: Duration + queryTimeout: Duration, + defaultProperties: Map[String, String] )(implicit credentials: Option[HttpCredentials], as: ActorSystem) extends SparqlClient(client, SparqlQueryEndpoint.blazegraph(endpoint)) { - implicit private val cl: ClassLoader = getClass.getClassLoader - private val serviceVersion = """(buildVersion">)([^<]*)""".r private val serviceName = Name.unsafe("blazegraph") - private val defaultProperties = - ClasspathResourceUtils.bioPropertiesOf("blazegraph/index.properties").hideErrors.memoizeOnSuccess - override def query[R <: SparqlQueryResponse]( indices: Iterable[String], q: SparqlQuery, responseType: Aux[R], additionalHeaders: Seq[HttpHeader] - ): IO[SparqlClientError, R] = { + ): IO[R] = { val 
headers = queryTimeout match { case finite: FiniteDuration => additionalHeaders :+ RawHeader(timeoutHeader, finite.toMillis.toString) case _ => additionalHeaders @@ -57,10 +54,11 @@ class BlazegraphClient( /** * Fetches the service description information (name and version) */ - def serviceDescription: UIO[ServiceDescription] = + def serviceDescription: IO[ServiceDescription] = client .fromEntityTo[ResolvedServiceDescription](Get(endpoint / "status")) .timeout(5.seconds) + .toCatsIO .redeem( _ => ServiceDescription.unresolved(serviceName), _.map(_.copy(name = serviceName)).getOrElse(ServiceDescription.unresolved(serviceName)) @@ -69,11 +67,13 @@ class BlazegraphClient( /** * Check whether the passed namespace ''namespace'' exists. */ - def existsNamespace(namespace: String): IO[SparqlClientError, Boolean] = - client(Get(endpoint / "namespace" / namespace)) { - case resp if resp.status == OK => UIO.delay(resp.discardEntityBytes()) >> IO.pure(true) - case resp if resp.status == NotFound => UIO.delay(resp.discardEntityBytes()) >> IO.pure(false) - }.mapError(WrappedHttpClientError) + def existsNamespace(namespace: String): IO[Boolean] = + client + .run(Get(endpoint / "namespace" / namespace)) { + case resp if resp.status == OK => IO.delay(resp.discardEntityBytes()).as(true) + case resp if resp.status == NotFound => IO.delay(resp.discardEntityBytes()).as(false) + } + .adaptError { case e: HttpClientError => WrappedHttpClientError(e) } /** * Attempts to create a namespace (if it doesn't exist) recovering gracefully when the namespace already exists. 
@@ -85,17 +85,19 @@ class BlazegraphClient( * @return * ''true'' wrapped on an IO when namespace has been created and ''false'' wrapped on an IO when it already existed */ - def createNamespace(namespace: String, properties: Map[String, String]): IO[SparqlClientError, Boolean] = + def createNamespace(namespace: String, properties: Map[String, String]): IO[Boolean] = existsNamespace(namespace).flatMap { case true => IO.pure(false) case false => val updated = properties + ("com.bigdata.rdf.sail.namespace" -> namespace) val payload = updated.map { case (key, value) => s"$key=$value" }.mkString("\n") val req = Post(endpoint / "namespace", HttpEntity(payload)) - client(req) { - case resp if resp.status.isSuccess() => UIO.delay(resp.discardEntityBytes()) >> IO.pure(true) - case resp if resp.status == Conflict => UIO.delay(resp.discardEntityBytes()) >> IO.pure(false) - }.mapError(WrappedHttpClientError) + client + .run(req) { + case resp if resp.status.isSuccess() => IO.delay(resp.discardEntityBytes()).as(true) + case resp if resp.status == Conflict => IO.delay(resp.discardEntityBytes()).as(false) + } + .adaptError { case e: HttpClientError => WrappedHttpClientError(e) } } /** @@ -107,8 +109,7 @@ class BlazegraphClient( * @return * ''true'' wrapped on an IO when namespace has been created and ''false'' wrapped on an IO when it already existed */ - def createNamespace(namespace: String): IO[SparqlClientError, Boolean] = - defaultProperties.flatMap(createNamespace(namespace, _)) + def createNamespace(namespace: String): IO[Boolean] = createNamespace(namespace, defaultProperties) /** * Attempts to delete a namespace recovering gracefully when the namespace does not exists. 
@@ -116,16 +117,18 @@ class BlazegraphClient( * @return * ''true'' wrapped in ''F'' when namespace has been deleted and ''false'' wrapped in ''F'' when it does not existe */ - def deleteNamespace(namespace: String): IO[SparqlClientError, Boolean] = - client(Delete(endpoint / "namespace" / namespace)) { - case resp if resp.status == OK => UIO.delay(resp.discardEntityBytes()) >> IO.pure(true) - case resp if resp.status == NotFound => UIO.delay(resp.discardEntityBytes()) >> IO.pure(false) - }.mapError(WrappedHttpClientError) + def deleteNamespace(namespace: String): IO[Boolean] = + client + .run(Delete(endpoint / "namespace" / namespace)) { + case resp if resp.status == OK => IO.delay(resp.discardEntityBytes()).as(true) + case resp if resp.status == NotFound => IO.delay(resp.discardEntityBytes()).as(false) + } + .adaptError { case e: HttpClientError => WrappedHttpClientError(e) } /** * Count all the triples on an index */ - def count(index: String): IO[SparqlClientError, Long] = { + def count(index: String): IO[Long] = { val sparqlQuery = SparqlConstructQuery.unsafe("SELECT (COUNT(?s) AS ?count) WHERE { ?s ?p ?o }") query(Set(index), sparqlQuery, SparqlResultsJson) .flatMap { response => @@ -135,7 +138,7 @@ class BlazegraphClient( count <- countAsString.value.toLongOption } yield count - IO.fromOption(count, InvalidCountRequest(index, sparqlQuery.value)) + IO.fromOption(count)(InvalidCountRequest(index, sparqlQuery.value)) } } @@ -163,10 +166,11 @@ object BlazegraphClient { client: HttpClient, endpoint: Uri, credentials: Option[Credentials], - queryTimeout: Duration + queryTimeout: Duration, + defaultProperties: Map[String, String] )(implicit as: ActorSystem): BlazegraphClient = { implicit val cred: Option[BasicHttpCredentials] = credentials.map { cred => BasicHttpCredentials(cred.username, cred.password.value) } - new BlazegraphClient(client, endpoint, queryTimeout) + new BlazegraphClient(client, endpoint, queryTimeout, defaultProperties) } } diff --git 
a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/client/SparqlClient.scala b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/client/SparqlClient.scala index fe7ea76837..bfb1c41a5c 100644 --- a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/client/SparqlClient.scala +++ b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/client/SparqlClient.scala @@ -5,21 +5,21 @@ import akka.http.scaladsl.client.RequestBuilding.Post import akka.http.scaladsl.model.Uri.Query import akka.http.scaladsl.model._ import akka.http.scaladsl.model.headers.{Accept, HttpCredentials} -import cats.syntax.foldable._ +import cats.effect.IO +import cats.syntax.all._ +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration.toCatsIOOps import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.SparqlClientError.{InvalidUpdateRequest, WrappedHttpClientError} import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.SparqlQueryResponse.{SparqlJsonLdResponse, SparqlNTriplesResponse, SparqlRdfXmlResponse, SparqlResultsResponse, SparqlXmlResultsResponse} import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.SparqlQueryResponseType._ import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.BNode import ch.epfl.bluebrain.nexus.delta.rdf.graph.NTriples import ch.epfl.bluebrain.nexus.delta.rdf.query.SparqlQuery -import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClient +import ch.epfl.bluebrain.nexus.delta.sdk.http.{HttpClient, HttpClientError} import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ import io.circe.Json import io.circe.syntax._ -import monix.bio.IO import org.apache.jena.query.ParameterizedSparqlString -import scala.util.Try import scala.xml.{Elem, NodeSeq} trait SparqlQueryClient { @@ -42,7 +42,7 @@ trait SparqlQueryClient { q: SparqlQuery, responseType: SparqlQueryResponseType.Aux[R], additionalHeaders: 
Seq[HttpHeader] = Seq.empty - ): IO[SparqlClientError, R] + ): IO[R] } /** @@ -61,7 +61,7 @@ class SparqlClient(client: HttpClient, endpoint: SparqlQueryEndpoint)(implicit q: SparqlQuery, responseType: SparqlQueryResponseType.Aux[R], additionalHeaders: Seq[HttpHeader] = Seq.empty - ): IO[SparqlClientError, R] = + ): IO[R] = responseType match { case SparqlResultsJson => sparqlResultsResponse(indices, q, additionalHeaders) case SparqlResultsXml => sparqlXmlResultsResponse(indices, q, additionalHeaders) @@ -80,17 +80,18 @@ class SparqlClient(client: HttpClient, endpoint: SparqlQueryEndpoint)(implicit * @return * successful Future[Unit] if update succeeded, failure otherwise */ - def bulk(index: String, queries: Seq[SparqlWriteQuery]): IO[SparqlClientError, Unit] = { + def bulk(index: String, queries: Seq[SparqlWriteQuery]): IO[Unit] = { val queryString = queries.map(_.value).mkString("\n") val pss = new ParameterizedSparqlString pss.setCommandText(queryString) for { - _ <- IO.fromTry(Try(pss.asUpdate())).mapError(th => InvalidUpdateRequest(index, queryString, th.getMessage)) + _ <- IO(pss.asUpdate()).adaptError(e => InvalidUpdateRequest(index, queryString, e.getMessage)) queryOpt = uniqueGraph(queries).map(graph => Query("using-named-graph-uri" -> graph.toString)) formData = FormData("update" -> queryString) reqEndpoint = endpoint(index).withQuery(queryOpt.getOrElse(Query.Empty)) req = Post(reqEndpoint, formData).withHttpCredentials - result <- client.discardBytes(req, ()).mapError(WrappedHttpClientError) + result <- + client.discardBytes(req, ()).toCatsIO.adaptError { case e: HttpClientError => WrappedHttpClientError(e) } } yield result } @@ -105,7 +106,7 @@ class SparqlClient(client: HttpClient, endpoint: SparqlQueryEndpoint)(implicit * @param data * the new graph as NTriples representation */ - def replace(index: String, graph: Uri, data: NTriples): IO[SparqlClientError, Unit] = + def replace(index: String, graph: Uri, data: NTriples): IO[Unit] = bulk(index, 
Seq(SparqlWriteQuery.replace(graph, data))) /** @@ -121,7 +122,7 @@ class SparqlClient(client: HttpClient, endpoint: SparqlQueryEndpoint)(implicit * @param strategy * the patch strategy */ - def patch(index: String, graph: Uri, data: NTriples, strategy: PatchStrategy): IO[SparqlClientError, Unit] = + def patch(index: String, graph: Uri, data: NTriples, strategy: PatchStrategy): IO[Unit] = bulk(index, Seq(SparqlWriteQuery.patch(graph, data, strategy))) /** @@ -132,7 +133,7 @@ class SparqlClient(client: HttpClient, endpoint: SparqlQueryEndpoint)(implicit * @param graph * the graph to drop */ - def drop(index: String, graph: Uri): IO[SparqlClientError, Unit] = + def drop(index: String, graph: Uri): IO[Unit] = bulk(index, Seq(SparqlWriteQuery.drop(graph))) private def uniqueGraph(query: Seq[SparqlWriteQuery]): Option[Uri] = @@ -145,7 +146,7 @@ class SparqlClient(client: HttpClient, endpoint: SparqlQueryEndpoint)(implicit indices: Iterable[String], q: SparqlQuery, additionalHeaders: Seq[HttpHeader] - ): IO[SparqlClientError, SparqlResultsResponse] = + ): IO[SparqlResultsResponse] = indices.toList .foldLeftM(SparqlResults.empty) { (results, index) => val req = Post(endpoint(index), FormData("query" -> q.value)) @@ -154,12 +155,13 @@ class SparqlClient(client: HttpClient, endpoint: SparqlQueryEndpoint)(implicit client.fromJsonTo[SparqlResults](req).mapError(WrappedHttpClientError).map(results ++ _) } .map(SparqlResultsResponse) + .toCatsIO private def sparqlXmlResultsResponse( indices: Iterable[String], q: SparqlQuery, additionalHeaders: Seq[HttpHeader] - ): IO[SparqlClientError, SparqlXmlResultsResponse] = + ): IO[SparqlXmlResultsResponse] = indices.toList .foldLeftM(None: Option[Elem]) { case (elem, index) => val req = Post(endpoint(index), FormData("query" -> q.value)) @@ -176,6 +178,7 @@ class SparqlClient(client: HttpClient, endpoint: SparqlQueryEndpoint)(implicit } } } + .toCatsIO .map { case Some(elem) => SparqlXmlResultsResponse(elem) case None => 
SparqlXmlResultsResponse(NodeSeq.Empty) @@ -185,7 +188,7 @@ class SparqlClient(client: HttpClient, endpoint: SparqlQueryEndpoint)(implicit indices: Iterable[String], q: SparqlQuery, additionalHeaders: Seq[HttpHeader] - ): IO[SparqlClientError, SparqlJsonLdResponse] = + ): IO[SparqlJsonLdResponse] = indices.toList .foldLeftM(Vector.empty[Json]) { (results, index) => val req = Post(endpoint(index), FormData("query" -> q.value)) @@ -196,13 +199,14 @@ class SparqlClient(client: HttpClient, endpoint: SparqlQueryEndpoint)(implicit .mapError(WrappedHttpClientError) .map(results ++ _.arrayOrObject(Vector.empty[Json], identity, obj => Vector(obj.asJson))) } + .toCatsIO .map(vector => SparqlJsonLdResponse(Json.arr(vector: _*))) private def sparqlNTriplesResponse( indices: Iterable[String], q: SparqlQuery, additionalHeaders: Seq[HttpHeader] - ): IO[SparqlClientError, SparqlNTriplesResponse] = + ): IO[SparqlNTriplesResponse] = indices.toList .foldLeftM(NTriples.empty) { (results, index) => val req = Post(endpoint(index), FormData("query" -> q.value)) @@ -210,13 +214,14 @@ class SparqlClient(client: HttpClient, endpoint: SparqlQueryEndpoint)(implicit .withHttpCredentials client.fromEntityTo[String](req).mapError(WrappedHttpClientError).map(s => results ++ NTriples(s, BNode.random)) } + .toCatsIO .map(SparqlNTriplesResponse) private def sparqlRdfXmlResponse( indices: Iterable[String], q: SparqlQuery, additionalHeaders: Seq[HttpHeader] - ): IO[SparqlClientError, SparqlRdfXmlResponse] = + ): IO[SparqlRdfXmlResponse] = indices.toList .foldLeftM(None: Option[Elem]) { case (elem, index) => val req = Post(endpoint(index), FormData("query" -> q.value)) @@ -229,6 +234,7 @@ class SparqlClient(client: HttpClient, endpoint: SparqlQueryEndpoint)(implicit } } } + .toCatsIO .map { case Some(elem) => SparqlRdfXmlResponse(elem) case None => SparqlRdfXmlResponse(NodeSeq.Empty) diff --git 
a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/indexing/BlazegraphCoordinator.scala b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/indexing/BlazegraphCoordinator.scala index 780b11d80d..57bb2eedf0 100644 --- a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/indexing/BlazegraphCoordinator.scala +++ b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/indexing/BlazegraphCoordinator.scala @@ -4,7 +4,6 @@ import cats.effect.{ContextShift, IO, Timer} import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.kernel.Logger import ch.epfl.bluebrain.nexus.delta.kernel.cache.LocalCache -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.BlazegraphViews import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.BlazegraphClient import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.config.BlazegraphViewsConfig @@ -136,7 +135,6 @@ object BlazegraphCoordinator { (v: ActiveViewDef) => client .createNamespace(v.namespace) - .toCatsIO .onError { e => logger.error(e)(s"Namespace for view '${v.ref.project}/${v.ref.viewId}' could not be created.") } diff --git a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/indexing/BlazegraphSink.scala b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/indexing/BlazegraphSink.scala index d76fdb11f5..b23198692b 100644 --- a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/indexing/BlazegraphSink.scala +++ b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/indexing/BlazegraphSink.scala @@ -15,7 +15,6 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.config.BatchConfig import ch.epfl.bluebrain.nexus.delta.sourcing.stream.Elem import 
ch.epfl.bluebrain.nexus.delta.sourcing.stream.Operation.Sink import com.typesafe.scalalogging.Logger -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import fs2.Chunk import shapeless.Typeable @@ -61,7 +60,6 @@ final class BlazegraphSink( if (bulk.queries.nonEmpty) client .bulk(namespace, bulk.queries) - .toCatsIO .redeemWith( err => IO diff --git a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/model/DefaultProperties.scala b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/model/DefaultProperties.scala new file mode 100644 index 0000000000..318a7a2d65 --- /dev/null +++ b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/model/DefaultProperties.scala @@ -0,0 +1,3 @@ +package ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.model + +final case class DefaultProperties(value: Map[String, String]) diff --git a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/slowqueries/BlazegraphSlowQueryDeleter.scala b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/slowqueries/BlazegraphSlowQueryDeleter.scala index 90f50ebba5..90797d48f9 100644 --- a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/slowqueries/BlazegraphSlowQueryDeleter.scala +++ b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/slowqueries/BlazegraphSlowQueryDeleter.scala @@ -1,7 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.slowqueries import cats.effect.{Clock, IO, Timer} -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.kernel.utils.IOInstant import ch.epfl.bluebrain.nexus.delta.sourcing.stream.{CompiledProjection, ExecutionStrategy, ProjectionMetadata, Supervisor} import fs2.{INothing, Stream} @@ -13,7 +12,7 @@ class 
BlazegraphSlowQueryDeleter(store: BlazegraphSlowQueryStore, deletionThresh ) { def deleteOldQueries: IO[Unit] = { IOInstant.now.flatMap { now => - store.removeQueriesOlderThan(now.minusMillis(deletionThreshold.toMillis)).toCatsIO + store.removeQueriesOlderThan(now.minusMillis(deletionThreshold.toMillis)) } } } diff --git a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/slowqueries/BlazegraphSlowQueryLogger.scala b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/slowqueries/BlazegraphSlowQueryLogger.scala index da148d07ab..dc38630cf0 100644 --- a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/slowqueries/BlazegraphSlowQueryLogger.scala +++ b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/slowqueries/BlazegraphSlowQueryLogger.scala @@ -1,11 +1,11 @@ package ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.slowqueries -import cats.effect.Clock -import ch.epfl.bluebrain.nexus.delta.kernel.utils.IOUtils +import cats.effect.{Clock, IO} +import cats.syntax.all._ +import ch.epfl.bluebrain.nexus.delta.kernel.Logger +import ch.epfl.bluebrain.nexus.delta.kernel.utils.IOInstant import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.BlazegraphViewsQuery.BlazegraphQueryContext import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.slowqueries.model.BlazegraphSlowQuery -import com.typesafe.scalalogging.Logger -import monix.bio.{IO, UIO} import scala.concurrent.duration.{Duration, FiniteDuration} @@ -24,7 +24,7 @@ trait BlazegraphSlowQueryLogger { * @return * the query */ - def apply[E, A](context: BlazegraphQueryContext, query: IO[E, A]): IO[E, A] + def apply[A](context: BlazegraphQueryContext, query: IO[A]): IO[A] } object BlazegraphSlowQueryLogger { @@ -32,13 +32,14 @@ object BlazegraphSlowQueryLogger { private val logger = Logger[BlazegraphSlowQueryLogger] def apply(sink: BlazegraphSlowQueryStore, 
longQueryThreshold: Duration)(implicit - clock: Clock[UIO] + clock: Clock[IO] ): BlazegraphSlowQueryLogger = new BlazegraphSlowQueryLogger { - def apply[E, A](context: BlazegraphQueryContext, query: IO[E, A]): IO[E, A] = { - query.attempt.timed - .flatMap { case (duration, outcome) => - UIO - .when(duration >= longQueryThreshold)(logSlowQuery(context, outcome.isLeft, duration)) + def apply[A](context: BlazegraphQueryContext, query: IO[A]): IO[A] = { + IOInstant + .timed(query.attempt) + .flatMap { case (outcome, duration) => + IO + .whenA(duration >= longQueryThreshold)(logSlowQuery(context, outcome.isLeft, duration)) .flatMap(_ => IO.fromEither(outcome)) } } @@ -47,16 +48,13 @@ object BlazegraphSlowQueryLogger { context: BlazegraphQueryContext, isError: Boolean, duration: FiniteDuration - ): UIO[Unit] = { - IOUtils.instant - .tapEval(_ => - UIO.delay(logger.warn(s"Slow blazegraph query recorded: duration '$duration', view '${context.view}'")) - ) - .flatMap { now => - sink - .save(BlazegraphSlowQuery(context.view, context.query, isError, duration, now, context.subject)) - .onErrorHandleWith(e => UIO.delay(logger.error("error logging blazegraph slow query", e))) - } - } + ): IO[Unit] = + logger.warn(s"Slow blazegraph query recorded: duration '$duration', view '${context.view}'") >> + IOInstant.now + .flatMap { now => + sink + .save(BlazegraphSlowQuery(context.view, context.query, isError, duration, now, context.subject)) + .handleErrorWith(e => logger.error(e)("error logging blazegraph slow query")) + } } } diff --git a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/slowqueries/BlazegraphSlowQueryStore.scala b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/slowqueries/BlazegraphSlowQueryStore.scala index 9ceaa8d205..5a03d7009b 100644 --- a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/slowqueries/BlazegraphSlowQueryStore.scala +++ 
b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/slowqueries/BlazegraphSlowQueryStore.scala @@ -1,5 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.slowqueries +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.slowqueries.model.BlazegraphSlowQuery import ch.epfl.bluebrain.nexus.delta.sdk.views.ViewRef import ch.epfl.bluebrain.nexus.delta.sourcing.Transactors @@ -7,7 +8,6 @@ import doobie.implicits._ import doobie.postgres.implicits._ import ch.epfl.bluebrain.nexus.delta.sourcing.implicits._ import io.circe.syntax.EncoderOps -import monix.bio.Task import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Database._ import java.time.Instant @@ -16,38 +16,38 @@ import java.time.Instant * Persistence operations for slow query logs */ trait BlazegraphSlowQueryStore { - def save(query: BlazegraphSlowQuery): Task[Unit] - def removeQueriesOlderThan(instant: Instant): Task[Unit] - def listForTestingOnly(view: ViewRef): Task[List[BlazegraphSlowQuery]] + def save(query: BlazegraphSlowQuery): IO[Unit] + def listForTestingOnly(view: ViewRef): IO[List[BlazegraphSlowQuery]] + def removeQueriesOlderThan(instant: Instant): IO[Unit] } object BlazegraphSlowQueryStore { def apply(xas: Transactors): BlazegraphSlowQueryStore = { new BlazegraphSlowQueryStore { - override def save(query: BlazegraphSlowQuery): Task[Unit] = { + override def save(query: BlazegraphSlowQuery): IO[Unit] = { sql""" INSERT INTO blazegraph_queries(project, view_id, instant, duration, subject, query, failed) | VALUES(${query.view.project}, ${query.view.viewId}, ${query.instant}, ${query.duration}, ${query.subject.asJson}, ${query.query.value}, ${query.failed}) """.stripMargin.update.run - .transact(xas.write) + .transact(xas.writeCE) .void } - override def listForTestingOnly(view: ViewRef): Task[List[BlazegraphSlowQuery]] = { + override def listForTestingOnly(view: ViewRef): IO[List[BlazegraphSlowQuery]] = { sql""" SELECT 
project, view_id, instant, duration, subject, query, failed FROM public.blazegraph_queries |WHERE view_id = ${view.viewId} AND project = ${view.project} """.stripMargin .query[BlazegraphSlowQuery] .stream - .transact(xas.read) + .transact(xas.readCE) .compile .toList } - override def removeQueriesOlderThan(instant: Instant): Task[Unit] = { + override def removeQueriesOlderThan(instant: Instant): IO[Unit] = { sql""" DELETE FROM public.blazegraph_queries |WHERE instant < $instant """.stripMargin.update.run - .transact(xas.write) + .transact(xas.writeCE) .void } } diff --git a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphClientSetup.scala b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphClientSetup.scala index 2a0b199a67..2c4c87e2d4 100644 --- a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphClientSetup.scala +++ b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphClientSetup.scala @@ -13,7 +13,7 @@ import monix.execution.Scheduler import scala.concurrent.duration._ -object BlazegraphClientSetup { +object BlazegraphClientSetup extends Fixtures { def resource()(implicit s: Scheduler): Resource[Task, BlazegraphClient] = { for { @@ -25,7 +25,8 @@ object BlazegraphClientSetup { httpClient, s"http://${container.getHost}:${container.getMappedPort(9999)}/blazegraph", None, - 10.seconds + 10.seconds, + defaultProperties ) } } diff --git a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/Fixtures.scala b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/Fixtures.scala index 9ad88b2d6d..717ca65a97 100644 --- a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/Fixtures.scala +++ 
b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/Fixtures.scala @@ -1,6 +1,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.blazegraph import cats.effect.IO +import ch.epfl.bluebrain.nexus.delta.kernel.utils.CatsEffectsClasspathResourceUtils import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.model.BlazegraphViewValue import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.model.contexts.{blazegraph, blazegraphMetadata} import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary @@ -24,5 +25,10 @@ trait Fixtures { Vocabulary.contexts.search -> ContextValue.fromFile("contexts/search.json") ) + val defaultProperties: Map[String, String] = + CatsEffectsClasspathResourceUtils.ioPropertiesOf("blazegraph/index.properties").unsafeRunSync() + def alwaysValidate: ValidateBlazegraphView = (_: BlazegraphViewValue) => IO.unit } + +object Fixtures extends Fixtures diff --git a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/client/BlazegraphClientSpec.scala b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/client/BlazegraphClientSpec.scala index 25ad10748d..bdd43a0e9d 100644 --- a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/client/BlazegraphClientSpec.scala +++ b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/client/BlazegraphClientSpec.scala @@ -3,6 +3,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client import akka.actor.ActorSystem import akka.http.scaladsl.model.Uri import akka.testkit.TestKit +import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.Fixtures.defaultProperties import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.PatchStrategy._ import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.SparqlClientError.WrappedHttpClientError import 
ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.SparqlQueryResponse.SparqlResultsResponse @@ -22,16 +23,11 @@ import ch.epfl.bluebrain.nexus.delta.sdk.model.ComponentDescription.ServiceDescr import ch.epfl.bluebrain.nexus.delta.sdk.model.Name import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ import ch.epfl.bluebrain.nexus.testkit.blazegraph.BlazegraphDocker -import ch.epfl.bluebrain.nexus.testkit.scalatest.{EitherValues, TestMatchers} -import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsIOValues -import ch.epfl.bluebrain.nexus.testkit.TestHelpers -import ch.epfl.bluebrain.nexus.testkit.scalatest.bio.BIOValues +import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsEffectSpec import io.circe.Json import monix.execution.Scheduler import org.scalatest.concurrent.Eventually -import org.scalatest.matchers.should.Matchers -import org.scalatest.wordspec.AnyWordSpecLike -import org.scalatest.{CancelAfterFailure, DoNotDiscover, Inspectors, Suite} +import org.scalatest.{CancelAfterFailure, DoNotDiscover} import scala.concurrent.duration._ import scala.xml.Elem @@ -39,18 +35,10 @@ import scala.xml.Elem @DoNotDiscover class BlazegraphClientSpec(docker: BlazegraphDocker) extends TestKit(ActorSystem("BlazegraphClientSpec")) - with Suite - with AnyWordSpecLike - with Matchers + with CatsEffectSpec with ConfigFixtures - with EitherValues with CancelAfterFailure - with TestHelpers - with Eventually - with Inspectors - with TestMatchers - with BIOValues - with CatsIOValues { + with Eventually { implicit private val sc: Scheduler = Scheduler.global implicit private val httpCfg: HttpClientConfig = httpClientConfig @@ -59,7 +47,7 @@ class BlazegraphClientSpec(docker: BlazegraphDocker) private lazy val endpoint = docker.hostConfig.endpoint private lazy val client = - BlazegraphClient(HttpClient(), endpoint, None, 10.seconds) + BlazegraphClient(HttpClient(), endpoint, None, 10.seconds, defaultProperties) private lazy val graphId = endpoint / "graphs" / "myid" private def 
nTriples(id: String = genString(), label: String = genString(), value: String = genString()) = { diff --git a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/client/SparqlQueryClientDummy.scala b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/client/SparqlQueryClientDummy.scala index 3647337346..2b2ab664a6 100644 --- a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/client/SparqlQueryClientDummy.scala +++ b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/client/SparqlQueryClientDummy.scala @@ -1,5 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client import akka.http.scaladsl.model.HttpHeader +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.SparqlQueryClientDummy.bNode import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.SparqlQueryResponse._ import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.SparqlQueryResponseType._ @@ -7,7 +8,6 @@ import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.BNode import ch.epfl.bluebrain.nexus.delta.rdf.graph.NTriples import ch.epfl.bluebrain.nexus.delta.rdf.query.SparqlQuery import io.circe.Json -import monix.bio.IO import scala.xml.NodeSeq @@ -23,7 +23,7 @@ class SparqlQueryClientDummy( q: SparqlQuery, responseType: Aux[R], additionalHeaders: Seq[HttpHeader] = Seq.empty - ): IO[SparqlClientError, R] = + ): IO[R] = responseType match { case SparqlResultsJson => IO.pure(SparqlResultsResponse(sparqlResults(indices))) diff --git a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/indexing/BlazegraphSinkSuite.scala b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/indexing/BlazegraphSinkSuite.scala index 931b3d3634..a495cffc2f 100644 --- 
a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/indexing/BlazegraphSinkSuite.scala +++ b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/indexing/BlazegraphSinkSuite.scala @@ -1,6 +1,5 @@ package ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.indexing -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.BlazegraphClientSetup import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.SparqlQueryResponseType import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri @@ -73,7 +72,6 @@ class BlazegraphSinkSuite private def query(namespace: String) = client .query(Set(namespace), constructQuery, SparqlQueryResponseType.SparqlNTriples) - .toCatsIO .map { response => Graph(response.value).toOption } test("Create the namespace") { @@ -127,7 +125,7 @@ class BlazegraphSinkSuite val expected = createGraph(Chunk(resource2Id -> resource2Ntriples, resource1Id -> resource1NtriplesUpdated)) for { - _ <- client.createNamespace(namespace).toCatsIO.assertEquals(true) + _ <- client.createNamespace(namespace).assertEquals(true) _ <- sink.apply(asElems(input)) _ <- query(namespace).assertSome(expected) } yield () @@ -150,7 +148,7 @@ class BlazegraphSinkSuite val expected = createGraph(Chunk.singleton(resource2Id -> resource2Ntriples)) for { - _ <- client.createNamespace(namespace).toCatsIO.assertEquals(true) + _ <- client.createNamespace(namespace).assertEquals(true) _ <- sink.apply(chunk) _ <- query(namespace).assertSome(expected) } yield () diff --git a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsIndexingRoutesSpec.scala b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsIndexingRoutesSpec.scala index e2806f25a2..6c147b8191 100644 --- 
a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsIndexingRoutesSpec.scala +++ b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsIndexingRoutesSpec.scala @@ -23,7 +23,7 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.projections.{ProjectionErrors, Pro import ch.epfl.bluebrain.nexus.delta.sourcing.query.SelectFilter import ch.epfl.bluebrain.nexus.delta.sourcing.stream.Elem.FailedElem import ch.epfl.bluebrain.nexus.delta.sourcing.stream.ProjectionProgress -import ch.epfl.bluebrain.nexus.testkit.bio.IOFromMap +import ch.epfl.bluebrain.nexus.testkit.ce.IOFromMap import ch.epfl.bluebrain.nexus.testkit.ce.CatsRunContext import java.time.Instant diff --git a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsQueryDummy.scala b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsQueryDummy.scala index 188473ec69..c84adefa7b 100644 --- a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsQueryDummy.scala +++ b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsQueryDummy.scala @@ -2,7 +2,6 @@ package ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.routes import cats.effect.IO import cats.implicits._ -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.kernel.search.Pagination import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.SparqlQueryResponseType.Aux import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.{SparqlClientError, SparqlQueryClient, SparqlQueryResponse} @@ -47,7 +46,7 @@ private[routes] class BlazegraphViewsQueryDummy( for { view <- views.fetch(id, project) _ <- IO.raiseWhen(view.deprecated)(ViewIsDeprecated(view.id)) - response <- 
client.query(Set(id.toString), query, responseType).toCatsIO.adaptError { case e: SparqlClientError => + response <- client.query(Set(id.toString), query, responseType).adaptError { case e: SparqlClientError => WrappedBlazegraphClientError(e) } } yield response diff --git a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsQuerySpec.scala b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsQuerySpec.scala index 5465563edd..32009dda9e 100644 --- a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsQuerySpec.scala +++ b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsQuerySpec.scala @@ -4,6 +4,7 @@ import akka.actor.ActorSystem import akka.http.scaladsl.model.Uri import akka.testkit.TestKit import cats.data.NonEmptySet +import cats.effect.IO import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.kernel.RetryStrategyConfig.AlwaysGiveUp import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ @@ -42,7 +43,6 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Identity, Label, ResourceRe import ch.epfl.bluebrain.nexus.delta.sourcing.postgres.DoobieScalaTestFixture import ch.epfl.bluebrain.nexus.testkit.blazegraph.BlazegraphDocker import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsEffectSpec -import monix.bio.{IO => BIO} import monix.execution.Scheduler import org.scalatest.concurrent.Eventually import org.scalatest.{CancelAfterFailure, DoNotDiscover, Inspectors} @@ -63,7 +63,7 @@ class BlazegraphViewsQuerySpec(docker: BlazegraphDocker) implicit override def patienceConfig: PatienceConfig = PatienceConfig(6.seconds, 100.millis) private val noopSlowQueryLogger: BlazegraphSlowQueryLogger = new BlazegraphSlowQueryLogger { - override def apply[E, A](context: BlazegraphQueryContext, query: BIO[E, A]): BIO[E, A] = 
query + override def apply[A](context: BlazegraphQueryContext, query: IO[A]): IO[A] = query } implicit private val sc: Scheduler = Scheduler.global @@ -74,7 +74,7 @@ class BlazegraphViewsQuerySpec(docker: BlazegraphDocker) private lazy val endpoint = docker.hostConfig.endpoint private lazy val client = - BlazegraphClient(HttpClient(), endpoint, None, 10.seconds) + BlazegraphClient(HttpClient(), endpoint, None, 10.seconds, defaultProperties) private val realm = Label.unsafe("myrealm") implicit private val alice: Caller = Caller(User("Alice", realm), Set(User("Alice", realm), Group("users", realm))) diff --git a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsRoutesSpec.scala b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsRoutesSpec.scala index cd4a283c01..bf54741169 100644 --- a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsRoutesSpec.scala +++ b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsRoutesSpec.scala @@ -7,7 +7,6 @@ import akka.http.scaladsl.server.Route import akka.util.ByteString import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.kernel.utils.{UUIDF, UrlUtils} -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.BlazegraphViews import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.{SparqlQueryClientDummy, SparqlResults} import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.model.BlazegraphViewRejection.ProjectContextRejection @@ -72,7 +71,7 @@ class BlazegraphViewsRoutesSpec extends BlazegraphViewRoutesFixtures { Map("resource-incoming-outgoing" -> linksResults) ) - private val groupDirectives = DeltaSchemeDirectives(fetchContext, _ => IO.none.toUIO, _ => IO.none.toUIO) + private val groupDirectives = 
DeltaSchemeDirectives(fetchContext, _ => IO.none, _ => IO.none) private lazy val routes = Route.seal( BlazegraphViewsRoutesHandler( diff --git a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/slowqueries/BlazegraphSlowQueryLoggerSuite.scala b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/slowqueries/BlazegraphSlowQueryLoggerSuite.scala index 4e8d65c79c..f4848cb40e 100644 --- a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/slowqueries/BlazegraphSlowQueryLoggerSuite.scala +++ b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/slowqueries/BlazegraphSlowQueryLoggerSuite.scala @@ -1,5 +1,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.slowqueries +import cats.effect.IO +import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.BlazegraphViewsQuery.BlazegraphQueryContext import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.slowqueries.BlazegraphSlowQueryLoggerSuite._ import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.slowqueries.model.BlazegraphSlowQuery @@ -8,22 +10,21 @@ import ch.epfl.bluebrain.nexus.delta.rdf.query.SparqlQuery import ch.epfl.bluebrain.nexus.delta.sdk.views.ViewRef import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Identity, Label, ProjectRef} import ch.epfl.bluebrain.nexus.delta.sourcing.postgres.Doobie -import ch.epfl.bluebrain.nexus.testkit.mu.bio.BioSuite -import monix.bio.Task +import ch.epfl.bluebrain.nexus.testkit.mu.ce.CatsEffectSuite import munit.AnyFixture import java.time.Instant -import scala.concurrent.duration.DurationInt +import scala.concurrent.duration.{DurationInt, FiniteDuration} object BlazegraphSlowQueryLoggerSuite { private val LongQueryThreshold = 100.milliseconds private val StoreWhichFails: BlazegraphSlowQueryStore = new BlazegraphSlowQueryStore { - override def save(query: BlazegraphSlowQuery): Task[Unit] = - 
Task.raiseError(new RuntimeException("error saving slow log")) + override def save(query: BlazegraphSlowQuery): IO[Unit] = + IO.raiseError(new RuntimeException("error saving slow log")) - override def removeQueriesOlderThan(instant: Instant): Task[Unit] = Task.unit + override def removeQueriesOlderThan(instant: Instant): IO[Unit] = IO.unit - override def listForTestingOnly(view: ViewRef): Task[List[BlazegraphSlowQuery]] = Task.pure(Nil) + override def listForTestingOnly(view: ViewRef): IO[List[BlazegraphSlowQuery]] = IO.pure(Nil) } private val view = ViewRef(ProjectRef.unsafe("epfl", "blue-brain"), Iri.unsafe("hippocampus")) @@ -31,7 +32,7 @@ object BlazegraphSlowQueryLoggerSuite { private val user = Identity.User("Ted Lasso", Label.unsafe("epfl")) } -class BlazegraphSlowQueryLoggerSuite extends BioSuite with Doobie.Fixture with BlazegraphSlowQueryStoreFixture { +class BlazegraphSlowQueryLoggerSuite extends CatsEffectSuite with Doobie.Fixture with BlazegraphSlowQueryStoreFixture { override def munitFixtures: Seq[AnyFixture[_]] = List(doobie, blazegraphSlowQueryStore) @@ -44,6 +45,15 @@ class BlazegraphSlowQueryLoggerSuite extends BioSuite with Doobie.Fixture with B (logger, store.listForTestingOnly(view)) } + private def assertSavedQuery(actual: BlazegraphSlowQuery, failed: Boolean, minDuration: FiniteDuration): Unit = { + assertEquals(actual.view, view) + assertEquals(actual.query, sparqlQuery) + assertEquals(actual.subject, user) + assertEquals(actual.failed, failed) + assertEquals(actual.instant, Instant.EPOCH) + assert(actual.duration >= minDuration) + } + test("slow query logged") { val (logSlowQuery, getLoggedQueries) = fixture @@ -55,12 +65,13 @@ class BlazegraphSlowQueryLoggerSuite extends BioSuite with Doobie.Fixture with B sparqlQuery, user ), - Task.sleep(101.milliseconds) + IO.sleep(101.milliseconds) ) saved <- getLoggedQueries } yield { assertEquals(saved.size, 1) - val onlyRecord = saved.head + assertSavedQuery(saved.head, failed = false, 101.millis) 
+ val onlyRecord: BlazegraphSlowQuery = saved.head assertEquals(onlyRecord.view, view) assertEquals(onlyRecord.query, sparqlQuery) assertEquals(onlyRecord.subject, user) @@ -75,24 +86,19 @@ class BlazegraphSlowQueryLoggerSuite extends BioSuite with Doobie.Fixture with B val (logSlowQuery, getLoggedQueries) = fixture for { - _ <- logSlowQuery( - BlazegraphQueryContext( - view, - sparqlQuery, - user - ), - Task.sleep(101.milliseconds) >> Task.raiseError(new RuntimeException()) - ).failed - saved <- getLoggedQueries + maybeResult <- logSlowQuery( + BlazegraphQueryContext( + view, + sparqlQuery, + user + ), + IO.sleep(101.milliseconds) >> IO.raiseError(new RuntimeException()) + ).attempt + saved <- getLoggedQueries } yield { + assert(maybeResult.isLeft) assertEquals(saved.size, 1) - val onlyRecord = saved.head - assertEquals(onlyRecord.view, view) - assertEquals(onlyRecord.query, sparqlQuery) - assertEquals(onlyRecord.subject, user) - assertEquals(onlyRecord.failed, true) - assertEquals(onlyRecord.instant, Instant.EPOCH) - assert(onlyRecord.duration > 100.milliseconds) + assertSavedQuery(saved.head, failed = true, 101.millis) } } @@ -107,7 +113,7 @@ class BlazegraphSlowQueryLoggerSuite extends BioSuite with Doobie.Fixture with B sparqlQuery, user ), - Task.sleep(50.milliseconds) + IO.sleep(50.milliseconds) ) saved <- getLoggedQueries } yield { @@ -127,7 +133,7 @@ class BlazegraphSlowQueryLoggerSuite extends BioSuite with Doobie.Fixture with B sparqlQuery, user ), - Task.sleep(101.milliseconds).as("result") - ).assert("result") + IO.sleep(101.milliseconds).as("result") + ).assertEquals("result") } } diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/BlazegraphQuery.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/BlazegraphQuery.scala index 79861217e5..c54bba32f4 100644 --- 
a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/BlazegraphQuery.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/BlazegraphQuery.scala @@ -2,7 +2,6 @@ package ch.epfl.bluebrain.nexus.delta.plugins.compositeviews import cats.effect.IO import cats.syntax.all._ -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.SparqlQueryResponseType.Aux import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client._ import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.indexing.CompositeViewDef.ActiveViewDef @@ -120,7 +119,9 @@ object BlazegraphQuery { AuthorizationFailed(s"Defined permissions on sparql projection on '${view.ref}' are missing.") ) namespace = commonNamespace(view.uuid, view.indexingRev, prefix) - result <- client.query(Set(namespace), query, responseType).mapError(WrappedBlazegraphClientError) + result <- client.query(Set(namespace), query, responseType).adaptError { case e: SparqlClientError => + WrappedBlazegraphClientError(e) + } } yield result override def query[R <: SparqlQueryResponse]( @@ -136,7 +137,9 @@ object BlazegraphQuery { _ <- aclCheck.authorizeForOr(project, projection.permission)(AuthorizationFailed(project, projection.permission)) namespace = projectionNamespace(projection, view.uuid, prefix) - result <- client.query(Set(namespace), query, responseType).mapError(WrappedBlazegraphClientError) + result <- client.query(Set(namespace), query, responseType).adaptError { case e: SparqlClientError => + WrappedBlazegraphClientError(e) + } } yield result override def queryProjections[R <: SparqlQueryResponse]( @@ -148,7 +151,9 @@ object BlazegraphQuery { for { view <- fetchView(id, project) namespaces <- allowedProjections(view, project) - result <- client.query(namespaces, query, responseType).mapError(WrappedBlazegraphClientError) + result <- 
client.query(namespaces, query, responseType).adaptError { case e: SparqlClientError => + WrappedBlazegraphClientError(e) + } } yield result private def fetchProjection(view: ActiveViewDef, projectionId: IdSegment) = diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeSink.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeSink.scala index 22195ea3a9..51dbe94e95 100644 --- a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeSink.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeSink.scala @@ -65,7 +65,7 @@ final class Single[SinkFormat]( private def queryTransform: GraphResource => IO[Option[SinkFormat]] = gr => for { - graph <- queryGraph(gr).toCatsIO + graph <- queryGraph(gr) transformed <- graph.flatTraverse(transform) } yield transformed @@ -115,7 +115,7 @@ final class Batch[SinkFormat]( /** Performs the sparql query only using [[SuccessElem]]s from the chunk */ private def query(elements: Chunk[Elem[GraphResource]]): IO[Option[Graph]] = elements.mapFilter(elem => elem.map(_.id).toOption) match { - case ids if ids.nonEmpty => queryGraph(ids).toCatsIO + case ids if ids.nonEmpty => queryGraph(ids) case _ => IO.none } diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViews.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViews.scala index 6a1e4432a8..9520cb9618 100644 --- a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViews.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViews.scala @@ -63,8 +63,8 @@ final class CompositeViews private ( baseUri: 
BaseUri ): IO[ViewResource] = { for { - pc <- toCatsIO(fetchContext.onCreate(project)) - iri <- toCatsIO(expandIri(id, pc)) + pc <- fetchContext.onCreate(project) + iri <- expandIri(id, pc) res <- eval(CreateCompositeView(iri, project, value, value.toJson(iri), subject, pc.base)) } yield res }.span("createCompositeView") @@ -82,8 +82,8 @@ final class CompositeViews private ( */ def create(project: ProjectRef, source: Json)(implicit caller: Caller): IO[ViewResource] = { for { - pc <- toCatsIO(fetchContext.onCreate(project)) - (iri, value) <- toCatsIO(sourceDecoder(project, pc, source)) + pc <- fetchContext.onCreate(project) + (iri, value) <- sourceDecoder(project, pc, source).toCatsIO res <- eval(CreateCompositeView(iri, project, value, source, caller.subject, pc.base)) } yield res }.span("createCompositeView") @@ -101,8 +101,8 @@ final class CompositeViews private ( */ def create(id: IdSegment, project: ProjectRef, source: Json)(implicit caller: Caller): IO[ViewResource] = { for { - pc <- toCatsIO(fetchContext.onCreate(project)) - iri <- toCatsIO(expandIri(id, pc)) + pc <- fetchContext.onCreate(project) + iri <- expandIri(id, pc) viewValue <- toCatsIO(sourceDecoder(project, pc, iri, source)) res <- eval(CreateCompositeView(iri, project, viewValue, source, caller.subject, pc.base)) } yield res @@ -132,8 +132,8 @@ final class CompositeViews private ( baseUri: BaseUri ): IO[ViewResource] = { for { - pc <- toCatsIO(fetchContext.onModify(project)) - iri <- toCatsIO(expandIri(id, pc)) + pc <- fetchContext.onModify(project) + iri <- expandIri(id, pc) source = value.toJson(iri) res <- eval(UpdateCompositeView(iri, project, rev, value, source, subject, pc.base)) } yield res @@ -155,8 +155,8 @@ final class CompositeViews private ( */ def update(id: IdSegment, project: ProjectRef, rev: Int, source: Json)(implicit caller: Caller): IO[ViewResource] = { for { - pc <- toCatsIO(fetchContext.onModify(project)) - iri <- toCatsIO(expandIri(id, pc)) + pc <- 
fetchContext.onModify(project) + iri <- expandIri(id, pc) viewValue <- toCatsIO(sourceDecoder(project, pc, iri, source)) res <- eval(UpdateCompositeView(iri, project, rev, viewValue, source, caller.subject, pc.base)) } yield res @@ -186,8 +186,8 @@ final class CompositeViews private ( rev: Int )(implicit subject: Subject): IO[ViewResource] = { for { - pc <- toCatsIO(fetchContext.onModify(project)) - iri <- toCatsIO(expandIri(id, pc)) + pc <- fetchContext.onModify(project) + iri <- expandIri(id, pc) res <- eval(TagCompositeView(iri, project, tagRev, tag, rev, subject)) } yield res }.span("tagCompositeView") @@ -210,8 +210,8 @@ final class CompositeViews private ( rev: Int )(implicit subject: Subject): IO[ViewResource] = { for { - pc <- toCatsIO(fetchContext.onModify(project)) - iri <- toCatsIO(expandIri(id, pc)) + pc <- fetchContext.onModify(project) + iri <- expandIri(id, pc) res <- eval(DeprecateCompositeView(iri, project, rev, subject)) } yield res }.span("deprecateCompositeView") @@ -249,8 +249,8 @@ final class CompositeViews private ( project: ProjectRef ): IO[CompositeViewState] = { for { - pc <- fetchContext.onRead(project).toCatsIO - iri <- expandIri(id.value, pc).toCatsIO + pc <- fetchContext.onRead(project) + iri <- expandIri(id.value, pc) notFound = ViewNotFound(iri, project) state <- id match { case Latest(_) => log.stateOr(project, iri, notFound) diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewsPluginModule.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewsPluginModule.scala index 1edd331b57..8fd731d4aa 100644 --- a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewsPluginModule.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewsPluginModule.scala @@ -6,6 +6,7 @@ import 
cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.BlazegraphClient +import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.model.DefaultProperties import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.client.DeltaClient import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.config.CompositeViewsConfig import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.deletion.CompositeViewsDeletionTask @@ -72,13 +73,15 @@ class CompositeViewsPluginModule(priority: Int) extends ModuleDef { ( cfg: CompositeViewsConfig, client: HttpClient @Id("http-indexing-client"), - as: ActorSystem[Nothing] + as: ActorSystem[Nothing], + properties: DefaultProperties ) => BlazegraphClient( client, cfg.blazegraphAccess.base, cfg.blazegraphAccess.credentials, - cfg.blazegraphAccess.queryTimeout + cfg.blazegraphAccess.queryTimeout, + properties.value )(as.classicSystem) } @@ -86,13 +89,15 @@ class CompositeViewsPluginModule(priority: Int) extends ModuleDef { ( cfg: CompositeViewsConfig, client: HttpClient @Id("http-query-client"), - as: ActorSystem[Nothing] + as: ActorSystem[Nothing], + properties: DefaultProperties ) => BlazegraphClient( client, cfg.blazegraphAccess.base, cfg.blazegraphAccess.credentials, - cfg.blazegraphAccess.queryTimeout + cfg.blazegraphAccess.queryTimeout, + properties.value )(as.classicSystem) } diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/BatchQueryGraph.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/BatchQueryGraph.scala index b8dcda686b..0961242488 100644 --- a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/BatchQueryGraph.scala +++ 
b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/BatchQueryGraph.scala @@ -1,7 +1,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.indexing +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.kernel.Logger -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.BlazegraphClient import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.SparqlQueryResponseType.SparqlNTriples import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewProjection.idTemplating @@ -9,7 +9,6 @@ import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.graph.{Graph, NTriples} import ch.epfl.bluebrain.nexus.delta.rdf.query.SparqlQuery.SparqlConstructQuery import fs2.Chunk -import monix.bio.Task import java.util.regex.Pattern.quote @@ -27,16 +26,16 @@ final class BatchQueryGraph(client: BlazegraphClient, namespace: String, query: private val logger = Logger[BatchQueryGraph] - private def newGraph(ntriples: NTriples): Task[Option[Graph]] = - if (ntriples.isEmpty) Task.none - else Task.fromEither(Graph(ntriples)).map(Some(_)) + private def newGraph(ntriples: NTriples): IO[Option[Graph]] = + if (ntriples.isEmpty) IO.none + else IO.fromEither(Graph(ntriples)).map(Some(_)) - def apply(ids: Chunk[Iri]): Task[Option[Graph]] = + def apply(ids: Chunk[Iri]): IO[Option[Graph]] = for { ntriples <- client.query(Set(namespace), replaceIds(query, ids), SparqlNTriples) graphResult <- newGraph(ntriples.value) - _ <- Task.when(graphResult.isEmpty)( - logger.debug(s"Querying blazegraph did not return any triples, '$ids' will be dropped.").toUIO + _ <- IO.whenA(graphResult.isEmpty)( + logger.debug(s"Querying blazegraph did not return any triples, '$ids' will be dropped.") ) } yield graphResult diff --git 
a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/CompositeSpaces.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/CompositeSpaces.scala index b038c4f339..7741f6618f 100644 --- a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/CompositeSpaces.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/CompositeSpaces.scala @@ -3,7 +3,6 @@ package ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.indexing import cats.effect.IO import cats.implicits.catsSyntaxFlatMapOps import ch.epfl.bluebrain.nexus.delta.kernel.Logger -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.BlazegraphClient import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.indexing.CompositeViewDef.ActiveViewDef import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewProjection @@ -43,21 +42,21 @@ object CompositeSpaces { ): CompositeSpaces = new CompositeSpaces { override def init(view: ActiveViewDef): IO[Unit] = { val common = commonNamespace(view.uuid, view.indexingRev, prefix) - val createCommon = blazeClient.createNamespace(common).toCatsIO.void + val createCommon = blazeClient.createNamespace(common).void val result = view.value.projections.foldLeft[IO[Unit]](createCommon) { case (acc, e: ElasticSearchProjection) => val index = projectionIndex(e, view.uuid, prefix) acc >> esClient.createIndex(index, Some(e.mapping), e.settings).void case (acc, s: SparqlProjection) => val namespace = projectionNamespace(s, view.uuid, prefix) - acc >> blazeClient.createNamespace(namespace).toCatsIO.void + acc >> blazeClient.createNamespace(namespace).void } logger.debug(s"Creating namespaces and indices for composite view ${view.ref}") >> result } override def 
destroyAll(view: ActiveViewDef): IO[Unit] = { val common = commonNamespace(view.uuid, view.indexingRev, prefix) - val deleteCommon = blazeClient.deleteNamespace(common).toCatsIO.void + val deleteCommon = blazeClient.deleteNamespace(common).void val result = view.value.projections.foldLeft[IO[Unit]](deleteCommon) { case (acc, p) => acc >> destroyProjection(view, p) } @@ -72,7 +71,7 @@ object CompositeSpaces { esClient.deleteIndex(index).void case s: SparqlProjection => val namespace = projectionNamespace(s, view.uuid, prefix) - blazeClient.deleteNamespace(namespace).toCatsIO.void + blazeClient.deleteNamespace(namespace).void } } } diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/SingleQueryGraph.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/SingleQueryGraph.scala index b183e17bac..dd14fe85e3 100644 --- a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/SingleQueryGraph.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/SingleQueryGraph.scala @@ -1,7 +1,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.indexing +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.kernel.Logger -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.BlazegraphClient import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.SparqlQueryResponseType.SparqlNTriples import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewProjection.idTemplating @@ -9,7 +9,6 @@ import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.graph.{Graph, NTriples} import ch.epfl.bluebrain.nexus.delta.rdf.query.SparqlQuery.SparqlConstructQuery import 
ch.epfl.bluebrain.nexus.delta.sourcing.state.GraphResource -import monix.bio.Task import java.util.regex.Pattern.quote @@ -27,21 +26,21 @@ final class SingleQueryGraph(client: BlazegraphClient, namespace: String, query: private val logger = Logger[SingleQueryGraph] - private def newGraph(ntriples: NTriples, id: Iri): Task[Option[Graph]] = + private def newGraph(ntriples: NTriples, id: Iri): IO[Option[Graph]] = if (ntriples.isEmpty) { // If nothing is returned by the query, we skip - Task.none + IO.none } else - Task.fromEither(Graph(ntriples.copy(rootNode = id))).map { g => + IO.fromEither(Graph(ntriples.copy(rootNode = id))).map { g => Some(g.replaceRootNode(id)) } - def apply(graphResource: GraphResource): Task[Option[GraphResource]] = + def apply(graphResource: GraphResource): IO[Option[GraphResource]] = for { ntriples <- client.query(Set(namespace), replaceId(query, graphResource.id), SparqlNTriples) graphResult <- newGraph(ntriples.value, graphResource.id) - _ <- Task.when(graphResult.isEmpty)( - logger.debug(s"Querying blazegraph did not return any triples, '$graphResource' will be dropped.").toUIO + _ <- IO.whenA(graphResult.isEmpty)( + logger.debug(s"Querying blazegraph did not return any triples, '$graphResource' will be dropped.") ) } yield graphResult.map(g => graphResource.copy(graph = g)) diff --git a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/CompositeIndexingSuite.scala b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/CompositeIndexingSuite.scala index a788d7543d..f610f81013 100644 --- a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/CompositeIndexingSuite.scala +++ b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/CompositeIndexingSuite.scala @@ -337,13 +337,13 @@ abstract class 
CompositeIndexingSuite(sinkConfig: SinkConfig, query: SparqlConst for { // Initialise the namespaces and indices _ <- spaces.init(view) - _ <- bgClient.existsNamespace(commonNs).toCatsIO.assertEquals(true) - _ <- bgClient.existsNamespace(sparqlNamespace).toCatsIO.assertEquals(true) + _ <- bgClient.existsNamespace(commonNs).assertEquals(true) + _ <- bgClient.existsNamespace(sparqlNamespace).assertEquals(true) _ <- esClient.existsIndex(elasticIndex).assertEquals(true) // Delete them on destroy _ <- spaces.destroyAll(view) - _ <- bgClient.existsNamespace(commonNs).toCatsIO.assertEquals(false) - _ <- bgClient.existsNamespace(sparqlNamespace).toCatsIO.assertEquals(false) + _ <- bgClient.existsNamespace(commonNs).assertEquals(false) + _ <- bgClient.existsNamespace(sparqlNamespace).assertEquals(false) _ <- esClient.existsIndex(elasticIndex).assertEquals(false) } yield () } @@ -362,18 +362,18 @@ abstract class CompositeIndexingSuite(sinkConfig: SinkConfig, query: SparqlConst for { // Initialise the namespaces and indices _ <- spaces.init(view) - _ <- bgClient.existsNamespace(commonNs).toCatsIO.assertEquals(true) - _ <- bgClient.existsNamespace(sparqlNamespace).toCatsIO.assertEquals(true) + _ <- bgClient.existsNamespace(commonNs).assertEquals(true) + _ <- bgClient.existsNamespace(sparqlNamespace).assertEquals(true) _ <- esClient.existsIndex(elasticIndex).assertEquals(true) // Delete the blazegraph projection _ <- spaces.destroyProjection(view, blazegraphProjection) - _ <- bgClient.existsNamespace(commonNs).toCatsIO.assertEquals(true) - _ <- bgClient.existsNamespace(sparqlNamespace).toCatsIO.assertEquals(false) + _ <- bgClient.existsNamespace(commonNs).assertEquals(true) + _ <- bgClient.existsNamespace(sparqlNamespace).assertEquals(false) _ <- esClient.existsIndex(elasticIndex).assertEquals(true) // Delete the elasticsearch projection _ <- spaces.destroyProjection(view, elasticSearchProjection) - _ <- bgClient.existsNamespace(commonNs).toCatsIO.assertEquals(true) - _ <- 
bgClient.existsNamespace(sparqlNamespace).toCatsIO.assertEquals(false) + _ <- bgClient.existsNamespace(commonNs).assertEquals(true) + _ <- bgClient.existsNamespace(sparqlNamespace).assertEquals(false) _ <- esClient.existsIndex(elasticIndex).assertEquals(false) } yield () } @@ -436,7 +436,7 @@ abstract class CompositeIndexingSuite(sinkConfig: SinkConfig, query: SparqlConst resultMuse, resultRedHot ).eventually(()) - _ <- checkBlazegraphTriples(sparqlNamespace, contentOf("indexing/result.nt")).toCatsIO + _ <- checkBlazegraphTriples(sparqlNamespace, contentOf("indexing/result.nt")) } yield () } @@ -475,7 +475,7 @@ abstract class CompositeIndexingSuite(sinkConfig: SinkConfig, query: SparqlConst resultMuseMetadata, resultRedHotMetadata ).eventually(()) - _ <- checkBlazegraphTriples(sparqlNamespace, contentOf("indexing/result_metadata.nt")).toCatsIO.eventually(()) + _ <- checkBlazegraphTriples(sparqlNamespace, contentOf("indexing/result_metadata.nt")).eventually(()) } yield () } @@ -521,7 +521,7 @@ abstract class CompositeIndexingSuite(sinkConfig: SinkConfig, query: SparqlConst resultMuse, resultRedHot ).eventually(()) - _ <- checkBlazegraphTriples(sparqlNamespace, contentOf("indexing/result.nt")).toCatsIO.eventually(()) + _ <- checkBlazegraphTriples(sparqlNamespace, contentOf("indexing/result.nt")).eventually(()) } yield () for { diff --git a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/routes/BlazegraphQueryDummy.scala b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/routes/BlazegraphQueryDummy.scala index bfe3cd6be7..fec3f94fce 100644 --- a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/routes/BlazegraphQueryDummy.scala +++ b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/routes/BlazegraphQueryDummy.scala @@ -1,9 +1,9 @@ package 
ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.routes import cats.effect.IO -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ +import cats.implicits.catsSyntaxMonadError import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.SparqlQueryResponseType.Aux -import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.{SparqlQueryClient, SparqlQueryResponse} +import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.{SparqlClientError, SparqlQueryClient, SparqlQueryResponse} import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewRejection.{ViewIsDeprecated, WrappedBlazegraphClientError} import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.{BlazegraphQuery, CompositeViews} import ch.epfl.bluebrain.nexus.delta.rdf.query.SparqlQuery @@ -22,7 +22,9 @@ class BlazegraphQueryDummy(client: SparqlQueryClient, views: CompositeViews) ext for { view <- views.fetch(id, project) _ <- IO.raiseWhen(view.deprecated)(ViewIsDeprecated(view.id)) - res <- client.query(Set("queryCommonNs"), query, responseType).mapError(WrappedBlazegraphClientError) + res <- client.query(Set("queryCommonNs"), query, responseType).adaptError { case e: SparqlClientError => + WrappedBlazegraphClientError(e) + } } yield res override def query[R <: SparqlQueryResponse]( @@ -35,7 +37,9 @@ class BlazegraphQueryDummy(client: SparqlQueryClient, views: CompositeViews) ext for { view <- views.fetch(id, project) _ <- IO.raiseWhen(view.deprecated)(ViewIsDeprecated(view.id)) - res <- client.query(Set("queryProjection"), query, responseType).mapError(WrappedBlazegraphClientError) + res <- client.query(Set("queryProjection"), query, responseType).adaptError { case e: SparqlClientError => + WrappedBlazegraphClientError(e) + } } yield res override def queryProjections[R <: SparqlQueryResponse]( @@ -47,7 +51,9 @@ class BlazegraphQueryDummy(client: SparqlQueryClient, views: CompositeViews) ext for { view <- views.fetch(id, project) _ <- 
IO.raiseWhen(view.deprecated)(ViewIsDeprecated(view.id)) - res <- client.query(Set("queryProjections"), query, responseType).mapError(WrappedBlazegraphClientError) + res <- client.query(Set("queryProjections"), query, responseType).adaptError { case e: SparqlClientError => + WrappedBlazegraphClientError(e) + } } yield res } diff --git a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/routes/CompositeViewsIndexingRoutesSpec.scala b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/routes/CompositeViewsIndexingRoutesSpec.scala index 4c533f013a..dbea86483c 100644 --- a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/routes/CompositeViewsIndexingRoutesSpec.scala +++ b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/routes/CompositeViewsIndexingRoutesSpec.scala @@ -32,7 +32,6 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.stream.Elem.FailedElem import ch.epfl.bluebrain.nexus.delta.sourcing.stream.{ProjectionProgress, RemainingElems} import io.circe.Json import io.circe.syntax._ -import monix.bio.UIO import java.time.Instant import scala.concurrent.duration._ @@ -44,7 +43,7 @@ class CompositeViewsIndexingRoutesSpec extends CompositeViewsRoutesFixtures { private val nowPlus5 = now.plusSeconds(5) private val fetchContext = FetchContextDummy[CompositeViewRejection](List(project), ProjectContextRejection) - private val groupDirectives = DeltaSchemeDirectives(fetchContext, _ => UIO.none, _ => UIO.none) + private val groupDirectives = DeltaSchemeDirectives(fetchContext, _ => IO.none, _ => IO.none) private val myId = nxv + "myid" private val view = CompositeViewsGen.resourceFor(projectRef, myId, uuid, viewValue, source = Json.obj()) diff --git a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/routes/CompositeViewsRoutesSpec.scala 
b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/routes/CompositeViewsRoutesSpec.scala index 7255f5aa7d..bea4a8c81d 100644 --- a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/routes/CompositeViewsRoutesSpec.scala +++ b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/routes/CompositeViewsRoutesSpec.scala @@ -5,6 +5,7 @@ import akka.http.scaladsl.model.headers.{`Content-Type`, Accept, Location} import akka.http.scaladsl.model.{HttpEntity, StatusCodes, Uri} import akka.http.scaladsl.server.Route import akka.util.ByteString +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.kernel.utils.UrlUtils import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.SparqlQueryClientDummy import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.CompositeViews @@ -28,7 +29,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContextDummy import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverContextResolution import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Anonymous import io.circe.syntax._ -import monix.bio.UIO class CompositeViewsRoutesSpec extends CompositeViewsRoutesFixtures { @@ -54,7 +54,7 @@ class CompositeViewsRoutesSpec extends CompositeViewsRoutesFixtures { private val responseQueryProjections = NTriples("queryProjections", BNode.random) private val fetchContext = FetchContextDummy[CompositeViewRejection](List(project), ProjectContextRejection) - private val groupDirectives = DeltaSchemeDirectives(fetchContext, _ => UIO.none, _ => UIO.none) + private val groupDirectives = DeltaSchemeDirectives(fetchContext, _ => IO.none, _ => IO.none) private lazy val views: CompositeViews = CompositeViews( fetchContext, diff --git a/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchViews.scala 
b/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchViews.scala index 5d64b37e75..816d77c590 100644 --- a/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchViews.scala +++ b/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchViews.scala @@ -36,7 +36,6 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag import ch.epfl.bluebrain.nexus.delta.sourcing.model._ import ch.epfl.bluebrain.nexus.delta.sourcing.offset.Offset import io.circe.Json -import monix.bio.{IO => BIO} import java.util.UUID @@ -109,7 +108,7 @@ final class ElasticSearchViews private ( source: Json )(implicit caller: Caller): IO[ViewResource] = { for { - pc <- fetchContext.onCreate(project).toCatsIO + pc <- fetchContext.onCreate(project) (iri, value) <- sourceDecoder(project, pc, source) res <- eval(CreateElasticSearchView(iri, project, value, source, caller.subject)) } yield res @@ -359,11 +358,11 @@ final class ElasticSearchViews private ( .map(_._2.toResource(defaultElasticsearchMapping, defaultElasticsearchSettings)) private def expandWithContext( - fetchCtx: ProjectRef => BIO[ElasticSearchViewRejection, ProjectContext], + fetchCtx: ProjectRef => IO[ProjectContext], ref: ProjectRef, id: IdSegment ): IO[(Iri, ProjectContext)] = - fetchCtx(ref).flatMap(pc => expandIri(id, pc).map(_ -> pc)).toCatsIO + fetchCtx(ref).flatMap(pc => expandIri(id, pc).map(_ -> pc)) } object ElasticSearchViews { diff --git a/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/query/DefaultSearchRequest.scala b/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/query/DefaultSearchRequest.scala index a2382019a9..a12ae8a54f 100644 --- 
a/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/query/DefaultSearchRequest.scala +++ b/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/query/DefaultSearchRequest.scala @@ -1,7 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.query import cats.effect.IO -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration.toCatsIOOps import ch.epfl.bluebrain.nexus.delta.kernel.error.Rejection import ch.epfl.bluebrain.nexus.delta.kernel.search.Pagination import ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.model.ResourcesSearchParams @@ -62,7 +61,6 @@ object DefaultSearchRequest { )(fetchContext: FetchContext[ElasticSearchQueryError]): IO[ProjectSearch] = fetchContext .onRead(ref) - .toCatsIO .flatMap { context => IO.fromEither(expandResourceRef(schema, context.apiMappings, context.base)) } diff --git a/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchIndexingRoutesSpec.scala b/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchIndexingRoutesSpec.scala index 4aa30360e1..308b8bc27c 100644 --- a/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchIndexingRoutesSpec.scala +++ b/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchIndexingRoutesSpec.scala @@ -27,7 +27,7 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.projections.{ProjectionErrors, Pro import ch.epfl.bluebrain.nexus.delta.sourcing.query.SelectFilter import ch.epfl.bluebrain.nexus.delta.sourcing.stream.Elem.FailedElem import ch.epfl.bluebrain.nexus.delta.sourcing.stream.{PipeChain, ProjectionProgress} -import ch.epfl.bluebrain.nexus.testkit.bio.IOFromMap +import ch.epfl.bluebrain.nexus.testkit.ce.IOFromMap import io.circe.JsonObject import 
java.time.Instant diff --git a/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchQueryRoutesSpec.scala b/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchQueryRoutesSpec.scala index a068a0d417..36f21b9d52 100644 --- a/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchQueryRoutesSpec.scala +++ b/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchQueryRoutesSpec.scala @@ -17,7 +17,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.model._ import ch.epfl.bluebrain.nexus.delta.sdk.projects.{FetchContext, FetchContextDummy} import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Anonymous import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label -import ch.epfl.bluebrain.nexus.testkit.bio.IOFromMap +import ch.epfl.bluebrain.nexus.testkit.ce.IOFromMap import io.circe.syntax._ import io.circe.{Json, JsonObject} diff --git a/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchViewsRoutesSpec.scala b/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchViewsRoutesSpec.scala index 3dfeb8439a..41f02d6862 100644 --- a/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchViewsRoutesSpec.scala +++ b/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchViewsRoutesSpec.scala @@ -22,7 +22,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.projects.{FetchContext, FetchContextDum import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverContextResolution import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, Subject} import 
ch.epfl.bluebrain.nexus.delta.sourcing.stream.PipeChain -import ch.epfl.bluebrain.nexus.testkit.bio.IOFromMap +import ch.epfl.bluebrain.nexus.testkit.ce.IOFromMap import io.circe.Json class ElasticSearchViewsRoutesSpec extends ElasticSearchViewsRoutesFixtures with IOFromMap { diff --git a/delta/plugins/graph-analytics/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/graph/analytics/GraphAnalytics.scala b/delta/plugins/graph-analytics/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/graph/analytics/GraphAnalytics.scala index 9d6a2b51e2..2587cab08c 100644 --- a/delta/plugins/graph-analytics/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/graph/analytics/GraphAnalytics.scala +++ b/delta/plugins/graph-analytics/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/graph/analytics/GraphAnalytics.scala @@ -4,7 +4,6 @@ import akka.http.scaladsl.model.Uri.Query import cats.data.NonEmptySeq import cats.effect.IO import cats.implicits._ -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration.toCatsIOOps import ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.client.{ElasticSearchClient, IndexLabel, QueryBuilder} import ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.model.ElasticSearchViewRejection.WrappedElasticSearchClientError import ch.epfl.bluebrain.nexus.delta.plugins.graph.analytics.config.GraphAnalyticsConfig.TermAggregationsConfig @@ -52,7 +51,7 @@ object GraphAnalytics { override def relationships(projectRef: ProjectRef): IO[AnalyticsGraph] = for { - _ <- fetchContext.onRead(projectRef).toCatsIO + _ <- fetchContext.onRead(projectRef) query <- relationshipsAggQuery(config) stats <- client .searchAs[AnalyticsGraph](QueryBuilder(query), index(prefix, projectRef).value, Query.Empty) @@ -74,8 +73,8 @@ object GraphAnalytics { } for { - pc <- fetchContext.onRead(projectRef).toCatsIO - tpeIri <- expandIri(tpe, pc).toCatsIO + pc <- fetchContext.onRead(projectRef) + tpeIri <- expandIri(tpe, pc) query <- propertiesAggQueryFor(tpeIri) stats <- 
search(tpeIri, index(prefix, projectRef), query) } yield stats diff --git a/delta/plugins/graph-analytics/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/graph/analytics/model/GraphAnalyticsRejection.scala b/delta/plugins/graph-analytics/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/graph/analytics/model/GraphAnalyticsRejection.scala index 63b14ca92a..acf30e1ac5 100644 --- a/delta/plugins/graph-analytics/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/graph/analytics/model/GraphAnalyticsRejection.scala +++ b/delta/plugins/graph-analytics/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/graph/analytics/model/GraphAnalyticsRejection.scala @@ -1,6 +1,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.graph.analytics.model import akka.http.scaladsl.model.StatusCodes +import ch.epfl.bluebrain.nexus.delta.kernel.error.Rejection import ch.epfl.bluebrain.nexus.delta.kernel.utils.ClassUtils import ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.model.ElasticSearchViewRejection import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.contexts @@ -19,7 +20,7 @@ import io.circe.{Encoder, JsonObject} * @param reason * a descriptive message as to why the rejection occurred */ -sealed abstract class GraphAnalyticsRejection(val reason: String) extends Exception(reason) +sealed abstract class GraphAnalyticsRejection(val reason: String) extends Rejection object GraphAnalyticsRejection { diff --git a/delta/plugins/search/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/search/SearchSparqlQuerySpec.scala b/delta/plugins/search/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/search/SearchSparqlQuerySpec.scala index bd9defd8c1..f60698f399 100644 --- a/delta/plugins/search/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/search/SearchSparqlQuerySpec.scala +++ b/delta/plugins/search/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/search/SearchSparqlQuerySpec.scala @@ -4,6 +4,7 @@ import akka.actor.ActorSystem import akka.testkit.TestKit import 
cats.effect.IO import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ +import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.Fixtures.defaultProperties import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.BlazegraphClient import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.SparqlQueryResponseType.SparqlNTriples import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode @@ -17,32 +18,23 @@ import ch.epfl.bluebrain.nexus.delta.sdk.ConfigFixtures import ch.epfl.bluebrain.nexus.delta.sdk.http.{HttpClient, HttpClientConfig} import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ import ch.epfl.bluebrain.nexus.testkit.blazegraph.BlazegraphDocker -import ch.epfl.bluebrain.nexus.testkit.scalatest.{EitherValues, TestMatchers} -import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsIOValues -import ch.epfl.bluebrain.nexus.testkit.TestHelpers -import ch.epfl.bluebrain.nexus.testkit.scalatest.bio.BIOValues +import ch.epfl.bluebrain.nexus.testkit.scalatest.EitherValues +import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.{CatsEffectSpec, CatsIOValues} import io.circe.Json import monix.execution.Scheduler +import org.scalatest.CancelAfterFailure import org.scalatest.concurrent.Eventually -import org.scalatest.matchers.should.Matchers -import org.scalatest.wordspec.AnyWordSpecLike -import org.scalatest.{CancelAfterFailure, Inspectors} import java.util.regex.Pattern.quote import scala.concurrent.duration._ class SearchSparqlQuerySpec extends TestKit(ActorSystem("SearchSparqlQuerySpec")) - with AnyWordSpecLike - with Matchers + with CatsEffectSpec with ConfigFixtures with EitherValues with CancelAfterFailure - with TestHelpers with Eventually - with Inspectors - with TestMatchers - with BIOValues with CatsIOValues with BlazegraphDocker { @@ -58,7 +50,7 @@ class SearchSparqlQuerySpec ) private lazy val endpoint = hostConfig.endpoint - private lazy val client = BlazegraphClient(HttpClient(), endpoint, None, 10.seconds) + private lazy val client = 
BlazegraphClient(HttpClient(), endpoint, None, 10.seconds, defaultProperties) private def toNTriples(json: Json): NTriples = { for { @@ -94,7 +86,7 @@ class SearchSparqlQuerySpec val q = contentOf("construct-query.sparql").replaceAll(quote("{resource_id}"), traceId.rdfFormat) val query = SparqlConstructQuery(q).rightValue val compacted = for { - ntriples <- toCatsIO(client.query(Set(index), query, SparqlNTriples)) + ntriples <- client.query(Set(index), query, SparqlNTriples) graph <- IO.fromEither(Graph(ntriples.value.copy(rootNode = traceId))) compacted <- graph.toCompactedJsonLd(ctx) } yield compacted diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/Files.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/Files.scala index 458e2af2c4..f6996804d9 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/Files.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/Files.scala @@ -3,7 +3,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.storage.files import akka.actor.typed.ActorSystem import akka.actor.{ActorSystem => ClassicActorSystem} import akka.http.scaladsl.model.ContentTypes.`application/octet-stream` -import akka.http.scaladsl.model.{ContentType, HttpEntity, Uri} +import akka.http.scaladsl.model._ import cats.effect.{Clock, ContextShift, IO, Timer} import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.kernel.cache.LocalCache @@ -20,7 +20,7 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection._ import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model._ import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.schemas.{files => fileSchema} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StoragesConfig.{RemoteDiskStorageConfig, StorageTypeConfig} -import 
ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection.{StorageFetchRejection, StorageIsDeprecated} +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection.{DifferentStorageType, InvalidStorageType, StorageFetchRejection, StorageIsDeprecated} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.{DigestAlgorithm, Storage, StorageRejection, StorageType} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageFileRejection.{FetchAttributeRejection, FetchFileRejection, SaveFileRejection} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations._ @@ -96,7 +96,7 @@ final class Files( tag: Option[UserTag] )(implicit caller: Caller): IO[FileResource] = { for { - pc <- fetchContext.onCreate(projectRef).toCatsIO + pc <- fetchContext.onCreate(projectRef) iri <- generateId(pc) _ <- test(CreateFile(iri, projectRef, testStorageRef, testStorageType, testAttributes, caller.subject, tag)) (storageRef, storage) <- fetchActiveStorage(storageId, projectRef, pc) @@ -159,7 +159,7 @@ final class Files( tag: Option[UserTag] )(implicit caller: Caller): IO[FileResource] = { for { - pc <- fetchContext.onCreate(projectRef).toCatsIO + pc <- fetchContext.onCreate(projectRef) iri <- generateId(pc) res <- createLink(iri, projectRef, pc, storageId, filename, mediaType, path, tag) } yield res @@ -197,6 +197,59 @@ final class Files( } yield res }.span("createLink") + // TODO comments + def copyTo( + sourceId: FileId, + dest: CopyFileDestination + )(implicit c: Caller): IO[FileResource] = { + for { + _ <- logger.info(s"Fetching source file") + (file, sourceDesc, sourceEntity) <- fetchSourceFile(sourceId) + _ <- logger.info(s"Fetched source file, fetching destination storage") + (pc, storageRef, storage) <- fetchDestinationStorage(dest) + _ <- logger.info(s"Fetched destination storage, validating storage type") + _ <- validateStorageTypeForCopy(file.storageType, storage) + iri <- 
dest.fileId.fold(generateId(pc))(_.expandIri(fetchContext.onCreate).map(_._1)) + _ <- logger.info(s"Validated storage type, saving file") + destinationDesc <- FileDescription(dest.filename.getOrElse(sourceDesc.filename), sourceDesc.mediaType) + attributes <- saveFile(iri, storage, destinationDesc, sourceEntity) + _ <- logger.info(s"Saved file, evaluating creation command destination storage") + res <- eval(CreateFile(iri, dest.project, storageRef, storage.tpe, attributes, c.subject, dest.tag)) + } yield res + }.span("copyFile") + + private def fetchSourceFile(id: FileId)(implicit c: Caller) = + for { + file <- fetch(id) + (iri, _) <- id.expandIri(fetchContext.onRead) + sourceStorage <- storages.fetch(file.value.storage, id.project) + _ <- validateAuth(id.project, sourceStorage.value.storageValue.readPermission) + attributes = file.value.attributes + sourceBytes <- fetchFile(sourceStorage.value, attributes, file.id) + bodyPartEntity = + HttpEntity(attributes.mediaType.getOrElse(ContentTypes.NoContentType), attributes.bytes, sourceBytes) + // NOTE(review): entity is built non-strict from the source stream — confirm whether a strict (fully buffered) entity is required before re-upload
+ multipartEntity = + Multipart.FormData(Multipart.FormData.BodyPart("file", bodyPartEntity, Map("filename" -> attributes.filename))) + (description, sourceEntity) <- extractFormData(iri, sourceStorage.value, multipartEntity.toEntity()) + } yield (file.value, description, sourceEntity) + + private def fetchDestinationStorage(dest: CopyFileDestination)(implicit c: Caller) = + for { + pc <- fetchContext.onCreate(dest.project) + (destStorageRef, destStorage) <- fetchActiveStorage(dest.storage, dest.project, pc) + } yield (pc, destStorageRef, destStorage) + + private def validateStorageTypeForCopy(source: StorageType, destination: Storage): IO[Unit] = + IO.raiseWhen(source == StorageType.S3Storage)( + WrappedStorageRejection( + InvalidStorageType(destination.id, source, Set(StorageType.DiskStorage, StorageType.RemoteDiskStorage)) + ) + ) >> + IO.raiseUnless(source == destination.tpe)( + WrappedStorageRejection(DifferentStorageType(destination.id, found = destination.tpe, expected = source)) + ) + /** * Update an existing file * @@ -458,22 +511,35 @@ final class Files( private def extractFileAttributes(iri: Iri, entity: HttpEntity, storage: Storage): IO[FileAttributes] = for { - storageAvailableSpace <- storage.storageValue.capacity.fold(IO.none[Long]) { capacity => - storagesStatistics - .get(storage.id, storage.project) - .redeem( - _ => Some(capacity), - stat => Some(capacity - stat.spaceUsed) - ) - } - (description, source) <- formDataExtractor(iri, entity, storage.storageValue.maxFileSize, storageAvailableSpace) - attributes <- SaveFile(storage, remoteDiskStorageClient, config) - .apply(description, source) - .adaptError { case e: SaveFileRejection => SaveRejection(iri, storage.id, e) } + (description, source) <- extractFormData(iri, storage, entity) + attributes <- saveFile(iri, storage, description, source) } yield attributes + private def extractFormData(iri: Iri, storage: Storage, entity: HttpEntity): IO[(FileDescription, BodyPartEntity)] = + for { + 
storageAvailableSpace <- fetchStorageAvailableSpace(storage) + (description, source) <- formDataExtractor(iri, entity, storage.storageValue.maxFileSize, storageAvailableSpace) + } yield (description, source) + + private def saveFile(iri: Iri, storage: Storage, description: FileDescription, source: BodyPartEntity) = + SaveFile(storage, remoteDiskStorageClient, config) + .apply(description, source) + .adaptError { case e: SaveFileRejection => SaveRejection(iri, storage.id, e) } + + private def fetchStorageAvailableSpace(storage: Storage): IO[Option[Long]] = + storage.storageValue.capacity.fold(IO.none[Long]) { capacity => + storagesStatistics + .get(storage.id, storage.project) + .redeem( + _ => Some(capacity), + stat => Some(capacity - stat.spaceUsed) + ) + } + private def expandStorageIri(segment: IdSegment, pc: ProjectContext): IO[Iri] = - Storages.expandIri(segment, pc).mapError(WrappedStorageRejection).toCatsIO + Storages.expandIri(segment, pc).adaptError { case s: StorageRejection => + WrappedStorageRejection(s) + } private def generateId(pc: ProjectContext)(implicit uuidF: UUIDF): IO[Iri] = uuidF().toCatsIO.map(uuid => pc.base.iri / uuid.toString) diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractor.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractor.scala index ea0c4e0c5c..9abccc76cb 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractor.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractor.scala @@ -10,6 +10,7 @@ import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, MultipartUnmars import akka.stream.scaladsl.{Keep, Sink} import cats.effect.{ContextShift, IO} import cats.syntax.all._ +import ch.epfl.bluebrain.nexus.delta.kernel.Logger import 
ch.epfl.bluebrain.nexus.delta.kernel.http.MediaTypeDetectorConfig import ch.epfl.bluebrain.nexus.delta.kernel.utils.{FileUtils, UUIDF} import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileDescription @@ -44,6 +45,8 @@ sealed trait FormDataExtractor { } object FormDataExtractor { + private val log = Logger[FormDataExtractor] + private val fieldName: String = "file" private val defaultContentType: ContentType.Binary = ContentTypes.`application/octet-stream` @@ -91,6 +94,7 @@ object FormDataExtractor { case Unmarshaller.NoContentException => WrappedAkkaRejection(RequestEntityExpectedRejection) case x: UnsupportedContentTypeException => + log.info(s"Supported media type is ${x.supported}").unsafeRunSync() WrappedAkkaRejection(UnsupportedRequestContentTypeRejection(x.supported, x.actualContentType)) case x: IllegalArgumentException => WrappedAkkaRejection(ValidationRejection(Option(x.getMessage).getOrElse(""), Some(x))) diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/CopyFileDestination.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/CopyFileDestination.scala new file mode 100644 index 0000000000..54aa46de74 --- /dev/null +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/CopyFileDestination.scala @@ -0,0 +1,13 @@ +package ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model + +import ch.epfl.bluebrain.nexus.delta.sdk.model.IdSegment +import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef +import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag + +final case class CopyFileDestination( + project: ProjectRef, + fileId: Option[FileId], + storage: Option[IdSegment], + tag: Option[UserTag], + filename: Option[String] +) diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileAttributes.scala 
b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileAttributes.scala index 426d9334e2..65c61683a4 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileAttributes.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileAttributes.scala @@ -28,7 +28,7 @@ import scala.annotation.nowarn * @param mediaType * the optional media type of the file * @param bytes - * the size of the file file in bytes + * the size of the file in bytes * @param digest * the digest information of the file * @param origin diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileId.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileId.scala index b6d4a6b855..a55f658f1f 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileId.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileId.scala @@ -1,7 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model import cats.effect.IO -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration.toCatsIOOps import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileId.iriExpander import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection.InvalidFileId import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri @@ -10,17 +9,17 @@ import ch.epfl.bluebrain.nexus.delta.sdk.model.{IdSegment, IdSegmentRef} import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ProjectContext import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag import ch.epfl.bluebrain.nexus.delta.sourcing.model.{ProjectRef, ResourceRef} -import monix.bio.{IO => BIO} final case class FileId(id: IdSegmentRef, project: ProjectRef) { - def expandIri(fetchContext: 
ProjectRef => BIO[FileRejection, ProjectContext]): IO[(Iri, ProjectContext)] = - fetchContext(project).flatMap(pc => iriExpander(id.value, pc).map(iri => (iri, pc))).toCatsIO + def expandIri(fetchContext: ProjectRef => IO[ProjectContext]): IO[(Iri, ProjectContext)] = + fetchContext(project).flatMap(pc => iriExpander(id.value, pc).map(iri => (iri, pc))) } object FileId { def apply(ref: ResourceRef, project: ProjectRef): FileId = FileId(IdSegmentRef(ref), project) def apply(id: IdSegment, tag: UserTag, project: ProjectRef): FileId = FileId(IdSegmentRef(id, tag), project) def apply(id: IdSegment, rev: Int, project: ProjectRef): FileId = FileId(IdSegmentRef(id, rev), project) + def apply(id: IdSegment, project: ProjectRef): FileId = FileId(IdSegmentRef(id), project) val iriExpander: ExpandIri[InvalidFileId] = new ExpandIri(InvalidFileId.apply) } diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileRejection.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileRejection.scala index 1763ee67fb..c4043f728e 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileRejection.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileRejection.scala @@ -16,6 +16,7 @@ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.JsonLdContext.keywords import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder import ch.epfl.bluebrain.nexus.delta.sdk.marshalling.HttpResponseFields import ch.epfl.bluebrain.nexus.delta.sdk.marshalling.RdfRejectionHandler.all._ +import ch.epfl.bluebrain.nexus.delta.sdk.model.IdSegment import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContext import ch.epfl.bluebrain.nexus.delta.sdk.syntax.httpResponseFieldsSyntax import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef @@ -150,6 +151,12 @@ object FileRejection { 
s"Linking a file '$id' cannot be performed without a 'filename' or a 'path' that does not end with a filename." ) + /** + * Rejection returned when attempting to fetch a file and including both the target tag and revision. + */ + final case class InvalidFileLookup(id: IdSegment) + extends FileRejection(s"Only one of 'tag' and 'rev' can be used to lookup file '$id'.") + /** * Rejection returned when attempting to create/update a file with a Multipart/Form-Data payload that does not * contain a ''file'' fieldName diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/CopyFilePayload.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/CopyFilePayload.scala new file mode 100644 index 0000000000..9458d89e77 --- /dev/null +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/CopyFilePayload.scala @@ -0,0 +1,37 @@ +package ch.epfl.bluebrain.nexus.delta.plugins.storage.files.routes + +import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileId +import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection.InvalidFileLookup +import ch.epfl.bluebrain.nexus.delta.sdk.model.IdSegment +import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef +import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag +import io.circe.Decoder + +final case class CopyFilePayload( + destFilename: Option[String], + sourceProj: ProjectRef, + sourceFile: IdSegment, + sourceTag: Option[UserTag], + sourceRev: Option[Int] +) { + def toSourceFileId: Either[InvalidFileLookup, FileId] = (sourceTag, sourceRev) match { + case (Some(tag), None) => Right(FileId(sourceFile, tag, sourceProj)) + case (None, Some(rev)) => Right(FileId(sourceFile, rev, sourceProj)) + case (None, None) => Right(FileId(sourceFile, sourceProj)) + case (Some(_), Some(_)) => Left(InvalidFileLookup(sourceFile)) + } +} + +object CopyFilePayload { + 
+ implicit val dec: Decoder[CopyFilePayload] = Decoder.instance { cur => + val source = cur.downField("source") + for { + destFilename <- cur.get[Option[String]]("destinationFilename") + sourceProj <- source.get[ProjectRef]("projectRef") + sourceFileId <- source.get[String]("fileId").map(IdSegment(_)) + sourceTag <- source.get[Option[UserTag]]("tag") + sourceRev <- source.get[Option[Int]]("rev") + } yield CopyFilePayload(destFilename, sourceProj, sourceFileId, sourceTag, sourceRev) + } +} diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala index 4a7c69a9dd..572f498b3f 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala @@ -6,10 +6,11 @@ import akka.http.scaladsl.model.headers.Accept import akka.http.scaladsl.model.{ContentType, MediaRange} import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server._ +import cats.data.EitherT import cats.effect.IO import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection._ -import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.{File, FileId, FileRejection} +import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.{CopyFileDestination, File, FileId, FileRejection} import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.permissions.{read => Read, write => Write} import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.routes.FilesRoutes._ import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.{schemas, FileResource, Files} @@ -18,8 +19,8 @@ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolution import 
ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering import ch.epfl.bluebrain.nexus.delta.sdk._ import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck -import ch.epfl.bluebrain.nexus.delta.sdk.directives.DeltaDirectives._ import ch.epfl.bluebrain.nexus.delta.sdk.circe.CirceUnmarshalling +import ch.epfl.bluebrain.nexus.delta.sdk.directives.DeltaDirectives._ import ch.epfl.bluebrain.nexus.delta.sdk.directives.{AuthDirectives, DeltaSchemeDirectives} import ch.epfl.bluebrain.nexus.delta.sdk.error.ServiceError.AuthorizationFailed import ch.epfl.bluebrain.nexus.delta.sdk.fusion.FusionConfig @@ -28,6 +29,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.implicits._ import ch.epfl.bluebrain.nexus.delta.sdk.model.routes.Tag import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, IdSegment} +import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag import io.circe.Decoder import io.circe.generic.extras.Configuration @@ -72,9 +74,9 @@ final class FilesRoutes( (baseUriPrefix(baseUri.prefix) & replaceUri("files", schemas.files)) { pathPrefix("files") { extractCaller { implicit caller => - resolveProjectRef.apply { ref => + resolveProjectRef.apply { projectRef => implicit class IndexOps(io: IO[FileResource]) { - def index(m: IndexingMode): IO[FileResource] = io.flatTap(self.index(ref, _, m)) + def index(m: IndexingMode): IO[FileResource] = io.flatTap(self.index(projectRef, _, m)) } concat( @@ -88,30 +90,68 @@ final class FilesRoutes( emit( Created, files - .createLink(storage, ref, filename, mediaType, path, tag) + .createLink(storage, projectRef, filename, mediaType, path, tag) .index(mode) .attemptNarrow[FileRejection] ) }, + // Create a file by copying from another project, without id segment + entity(as[CopyFilePayload]) { c: CopyFilePayload => + val copyTo = CopyFileDestination(projectRef, None, storage, tag, c.destFilename) + + emit(Created, 
copyFile(projectRef, mode, c, copyTo)) + }, // Create a file without id segment extractRequestEntity { entity => emit( Created, - files.create(storage, ref, entity, tag).index(mode).attemptNarrow[FileRejection] + files.create(storage, projectRef, entity, tag).index(mode).attemptNarrow[FileRejection] ) } ) } }, (idSegment & indexingMode) { (id, mode) => - val fileId = FileId(id, ref) + val fileId = FileId(id, projectRef) concat( pathEndOrSingleSlash { operationName(s"$prefixSegment/files/{org}/{project}/{id}") { concat( (put & pathEndOrSingleSlash) { - parameters("rev".as[Int].?, "storage".as[IdSegment].?, "tag".as[UserTag].?) { - case (None, storage, tag) => + concat( + // Create a file by copying from another project + parameters("storage".as[IdSegment].?, "tag".as[UserTag].?) { case (destStorage, destTag) => + entity(as[CopyFilePayload]) { c: CopyFilePayload => + val copyTo = + CopyFileDestination(projectRef, Some(fileId), destStorage, destTag, c.destFilename) + + emit(Created, copyFile(projectRef, mode, c, copyTo)) + } + }, + parameters("rev".as[Int], "storage".as[IdSegment].?, "tag".as[UserTag].?) { + case (rev, storage, tag) => + concat( + // Update a Link + entity(as[LinkFile]) { case LinkFile(filename, mediaType, path) => + emit( + files + .updateLink(fileId, storage, filename, mediaType, path, rev, tag) + .index(mode) + .attemptNarrow[FileRejection] + ) + }, + // Update a file + extractRequestEntity { entity => + emit( + files + .update(fileId, storage, rev, entity, tag) + .index(mode) + .attemptNarrow[FileRejection] + ) + } + ) + }, + parameters("storage".as[IdSegment].?, "tag".as[UserTag].?) 
{ case (storage, tag) => concat( // Link a file with id segment entity(as[LinkFile]) { case LinkFile(filename, mediaType, path) => @@ -127,36 +167,19 @@ final class FilesRoutes( extractRequestEntity { entity => emit( Created, - files.create(fileId, storage, entity, tag).index(mode).attemptNarrow[FileRejection] - ) - } - ) - case (Some(rev), storage, tag) => - concat( - // Update a Link - entity(as[LinkFile]) { case LinkFile(filename, mediaType, path) => - emit( files - .updateLink(fileId, storage, filename, mediaType, path, rev, tag) - .index(mode) - .attemptNarrow[FileRejection] - ) - }, - // Update a file - extractRequestEntity { entity => - emit( - files - .update(fileId, storage, rev, entity, tag) + .create(fileId, storage, entity, tag) .index(mode) .attemptNarrow[FileRejection] ) } ) - } + } + ) }, // Deprecate a file (delete & parameter("rev".as[Int])) { rev => - authorizeFor(ref, Write).apply { + authorizeFor(projectRef, Write).apply { emit( files .deprecate(fileId, rev) @@ -169,7 +192,7 @@ final class FilesRoutes( // Fetch a file (get & idSegmentRef(id)) { id => - emitOrFusionRedirect(ref, id, fetch(FileId(id, ref))) + emitOrFusionRedirect(projectRef, id, fetch(FileId(id, projectRef))) } ) } @@ -178,9 +201,9 @@ final class FilesRoutes( operationName(s"$prefixSegment/files/{org}/{project}/{id}/tags") { concat( // Fetch a file tags - (get & idSegmentRef(id) & pathEndOrSingleSlash & authorizeFor(ref, Read)) { id => + (get & idSegmentRef(id) & pathEndOrSingleSlash & authorizeFor(projectRef, Read)) { id => emit( - fetchMetadata(FileId(id, ref)) + fetchMetadata(FileId(id, projectRef)) .map(_.value.tags) .attemptNarrow[FileRejection] .rejectOn[FileNotFound] @@ -188,7 +211,7 @@ final class FilesRoutes( }, // Tag a file (post & parameter("rev".as[Int]) & pathEndOrSingleSlash) { rev => - authorizeFor(ref, Write).apply { + authorizeFor(projectRef, Write).apply { entity(as[Tag]) { case Tag(tagRev, tag) => emit( Created, @@ -199,7 +222,7 @@ final class FilesRoutes( }, 
// Delete a tag (tagLabel & delete & parameter("rev".as[Int]) & pathEndOrSingleSlash & authorizeFor( - ref, + projectRef, Write )) { (tag, rev) => emit( @@ -214,7 +237,7 @@ final class FilesRoutes( } }, (pathPrefix("undeprecate") & put & parameter("rev".as[Int])) { rev => - authorizeFor(ref, Write).apply { + authorizeFor(projectRef, Write).apply { emit( files .undeprecate(fileId, rev) @@ -232,6 +255,16 @@ final class FilesRoutes( } } + private def copyFile(projectRef: ProjectRef, mode: IndexingMode, c: CopyFilePayload, copyTo: CopyFileDestination)( + implicit caller: Caller + ): IO[Either[FileRejection, FileResource]] = + (for { + _ <- EitherT.right(aclCheck.authorizeForOr(c.sourceProj, Read)(AuthorizationFailed(c.sourceProj.project, Read))) + sourceFileId <- EitherT.fromEither[IO](c.toSourceFileId) + result <- EitherT(files.copyTo(sourceFileId, copyTo).attemptNarrow[FileRejection]) + _ <- EitherT.right[FileRejection](index(projectRef, result, mode)) + } yield result).value + def fetch(id: FileId)(implicit caller: Caller): Route = (headerValueByType(Accept) & varyAcceptHeaders) { case accept if accept.mediaRanges.exists(metadataMediaRanges.contains) => diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/Storages.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/Storages.scala index 0e9e33aa60..1506280e97 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/Storages.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/Storages.scala @@ -66,7 +66,7 @@ final class Storages private ( source: Json )(implicit caller: Caller): IO[StorageResource] = { for { - pc <- fetchContext.onCreate(projectRef).toCatsIO + pc <- fetchContext.onCreate(projectRef) (iri, storageFields) <- sourceDecoder(projectRef, pc, source).toCatsIO res <- eval(CreateStorage(iri, projectRef, 
storageFields, source, caller.subject)) _ <- unsetPreviousDefaultIfRequired(projectRef, res) @@ -89,8 +89,8 @@ final class Storages private ( source: Json )(implicit caller: Caller): IO[StorageResource] = { for { - pc <- fetchContext.onCreate(projectRef).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onCreate(projectRef) + iri <- expandIri(id, pc) storageFields <- sourceDecoder(projectRef, pc, iri, source).toCatsIO res <- eval(CreateStorage(iri, projectRef, storageFields, source, caller.subject)) _ <- unsetPreviousDefaultIfRequired(projectRef, res) @@ -113,8 +113,8 @@ final class Storages private ( storageFields: StorageFields )(implicit caller: Caller): IO[StorageResource] = { for { - pc <- fetchContext.onCreate(projectRef).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onCreate(projectRef) + iri <- expandIri(id, pc) source = storageFields.toJson(iri) res <- eval(CreateStorage(iri, projectRef, storageFields, source, caller.subject)) _ <- unsetPreviousDefaultIfRequired(projectRef, res) @@ -149,8 +149,8 @@ final class Storages private ( unsetPreviousDefault: Boolean )(implicit caller: Caller): IO[StorageResource] = { for { - pc <- fetchContext.onModify(projectRef).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onModify(projectRef) + iri <- expandIri(id, pc) storageFields <- sourceDecoder(projectRef, pc, iri, source).toCatsIO res <- eval(UpdateStorage(iri, projectRef, storageFields, source, rev, caller.subject)) _ <- IO.whenA(unsetPreviousDefault)(unsetPreviousDefaultIfRequired(projectRef, res)) @@ -176,8 +176,8 @@ final class Storages private ( storageFields: StorageFields )(implicit caller: Caller): IO[StorageResource] = { for { - pc <- fetchContext.onModify(projectRef).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onModify(projectRef) + iri <- expandIri(id, pc) source = storageFields.toJson(iri) res <- eval(UpdateStorage(iri, projectRef, storageFields, source, rev, caller.subject)) _ <- 
unsetPreviousDefaultIfRequired(projectRef, res) @@ -206,8 +206,8 @@ final class Storages private ( rev: Int )(implicit subject: Subject): IO[StorageResource] = { for { - pc <- fetchContext.onModify(projectRef).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onModify(projectRef) + iri <- expandIri(id, pc) res <- eval(TagStorage(iri, projectRef, tagRev, tag, rev, subject)) } yield res }.span("tagStorage") @@ -228,8 +228,8 @@ final class Storages private ( rev: Int )(implicit subject: Subject): IO[StorageResource] = { for { - pc <- fetchContext.onModify(projectRef).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onModify(projectRef) + iri <- expandIri(id, pc) res <- eval(DeprecateStorage(iri, projectRef, rev, subject)) } yield res }.span("deprecateStorage") @@ -260,8 +260,8 @@ final class Storages private ( */ def fetch(id: IdSegmentRef, project: ProjectRef): IO[StorageResource] = { for { - pc <- fetchContext.onRead(project).toCatsIO - iri <- expandIri(id.value, pc).toCatsIO + pc <- fetchContext.onRead(project) + iri <- expandIri(id.value, pc) notFound = StorageNotFound(iri, project) state <- id match { case Latest(_) => log.stateOr(project, iri, notFound) diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/model/StorageRejection.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/model/StorageRejection.scala index 523ef630b6..024dd62f21 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/model/StorageRejection.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/model/StorageRejection.scala @@ -144,10 +144,11 @@ object StorageRejection { extends StorageRejection(s"Storage ${id.fold("")(id => s"'$id'")} has invalid JSON-LD payload.") /** - * Rejection returned when attempting to create a storage with an id that already exists. 
+ * Signals an attempt to update/create a storage based on a previous revision with a different storage type * * @param id - * the storage identifier + * @param found the current type of the storage + * @param expected the storage type requested in the update */ final case class DifferentStorageType(id: Iri, found: StorageType, expected: StorageType) extends StorageRejection(s"Storage '$id' is of type '$found' and can't be updated to be a '$expected' .") diff --git a/delta/plugins/storage/src/test/resources/errors/tag-and-rev-copy-error.json b/delta/plugins/storage/src/test/resources/errors/tag-and-rev-copy-error.json new file mode 100644 index 0000000000..111259dff1 --- /dev/null +++ b/delta/plugins/storage/src/test/resources/errors/tag-and-rev-copy-error.json @@ -0,0 +1,5 @@ +{ + "@context" : "https://bluebrain.github.io/nexus/contexts/error.json", + "@type" : "InvalidFileLookup", + "reason" : "Only one of 'tag' and 'rev' can be used to lookup file '{{fileId}}'." +} diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FileFixtures.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FileFixtures.scala index 9efb9fafc2..2d5d669b2c 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FileFixtures.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FileFixtures.scala @@ -12,7 +12,7 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.{AbsolutePat import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.nxv import ch.epfl.bluebrain.nexus.delta.sdk.generators.ProjectGen import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings -import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label +import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} import ch.epfl.bluebrain.nexus.testkit.scalatest.EitherValues import ch.epfl.bluebrain.nexus.testkit.scalatest.bio.BIOValues import monix.bio.Task @@ -25,25 +25,28 @@ trait 
FileFixtures extends EitherValues with BIOValues { self: Suite => - val uuid = UUID.fromString("8249ba90-7cc6-4de5-93a1-802c04200dcc") - val uuid2 = UUID.fromString("12345678-7cc6-4de5-93a1-802c04200dcc") - val ref = Ref.of[Task, UUID](uuid).accepted - implicit val uuidF: UUIDF = UUIDF.fromRef(ref) - val org = Label.unsafe("org") - val orgDeprecated = Label.unsafe("org-deprecated") - val project = ProjectGen.project("org", "proj", base = nxv.base, mappings = ApiMappings("file" -> schemas.files)) - val deprecatedProject = ProjectGen.project("org", "proj-deprecated") - val projectWithDeprecatedOrg = ProjectGen.project("org-deprecated", "other-proj") - val projectRef = project.ref - val diskId2 = nxv + "disk2" - val file1 = nxv + "file1" - val file2 = nxv + "file2" - val fileTagged = nxv + "fileTagged" - val fileTagged2 = nxv + "fileTagged2" - val file1Encoded = UrlUtils.encode(file1.toString) - val encodeId = (id: String) => UrlUtils.encode((nxv + id).toString) - val generatedId = project.base.iri / uuid.toString - val generatedId2 = project.base.iri / uuid2.toString + val uuid = UUID.fromString("8249ba90-7cc6-4de5-93a1-802c04200dcc") + val uuid2 = UUID.fromString("12345678-7cc6-4de5-93a1-802c04200dcc") + val uuidOrg2 = UUID.fromString("66666666-7cc6-4de5-93a1-802c04200dcc") + val ref = Ref.of[Task, UUID](uuid).accepted + implicit val uuidF: UUIDF = UUIDF.fromRef(ref) + val org = Label.unsafe("org") + val org2 = Label.unsafe("org2") + val project = ProjectGen.project(org.value, "proj", base = nxv.base, mappings = ApiMappings("file" -> schemas.files)) + val project2 = + ProjectGen.project(org2.value, "proj2", base = nxv.base, mappings = ApiMappings("file" -> schemas.files)) + val deprecatedProject = ProjectGen.project("org", "proj-deprecated") + val projectRef = project.ref + val projectRefOrg2 = project2.ref + val diskId2 = nxv + "disk2" + val file1 = nxv + "file1" + val file2 = nxv + "file2" + val fileTagged = nxv + "fileTagged" + val fileTagged2 = nxv + 
"fileTagged2" + val file1Encoded = UrlUtils.encode(file1.toString) + val encodeId = (id: String) => UrlUtils.encode((nxv + id).toString) + val generatedId = project.base.iri / uuid.toString + val generatedId2 = project.base.iri / uuid2.toString val content = "file content" val path = AbsolutePath(JavaFiles.createTempDirectory("files")).rightValue @@ -56,12 +59,17 @@ trait FileFixtures extends EitherValues with BIOValues { _ <- ref.set(old) } yield t).accepted - def attributes(filename: String = "file.txt", size: Long = 12, id: UUID = uuid): FileAttributes = { + def attributes( + filename: String = "file.txt", + size: Long = 12, + id: UUID = uuid, + projRef: ProjectRef = projectRef + ): FileAttributes = { val uuidPathSegment = id.toString.take(8).mkString("/") FileAttributes( id, - s"file://$path/org/proj/$uuidPathSegment/$filename", - Uri.Path(s"org/proj/$uuidPathSegment/$filename"), + s"file://$path/${projRef.toString}/$uuidPathSegment/$filename", + Uri.Path(s"${projRef.toString}/$uuidPathSegment/$filename"), filename, Some(`text/plain(UTF-8)`), size, diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesSpec.scala index 201910dac0..2494ac751b 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesSpec.scala @@ -11,8 +11,8 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.RemoteContextResolutionFixt import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.Digest.NotComputedDigest import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileAttributes.FileAttributesOrigin.Storage import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection._ -import 
ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.{FileAttributes, FileId, FileRejection} -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection.StorageNotFound +import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.{CopyFileDestination, FileAttributes, FileId, FileRejection} +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection.{DifferentStorageType, StorageNotFound} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageType.{RemoteDiskStorage => RemoteStorageType} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.{StorageRejection, StorageStatEntry, StorageType} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.AkkaSourceHelpers @@ -41,8 +41,8 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.postgres.DoobieScalaTestFixture import ch.epfl.bluebrain.nexus.testkit.remotestorage.RemoteStorageDocker import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsEffectSpec import monix.execution.Scheduler -import org.scalatest.{Assertion, DoNotDiscover} import org.scalatest.concurrent.Eventually +import org.scalatest.{Assertion, DoNotDiscover} import java.net.URLDecoder @@ -93,13 +93,14 @@ class FilesSpec(docker: RemoteStorageDocker) val storage: IdSegment = nxv + "other-storage" val fetchContext = FetchContextDummy( - Map(project.ref -> project.context), + Map(project.ref -> project.context, project2.ref -> project2.context), Set(deprecatedProject.ref) ) val aclCheck = AclSimpleCheck( (Anonymous, AclAddress.Root, Set(Permissions.resources.read)), (bob, AclAddress.Project(projectRef), Set(diskFields.readPermission.value, diskFields.writePermission.value)), + (bob, AclAddress.Project(projectRefOrg2), Set(diskFields.readPermission.value, diskFields.writePermission.value)), (alice, AclAddress.Project(projectRef), Set(otherRead, otherWrite)) ).accepted @@ -152,10 +153,12 @@ class FilesSpec(docker: RemoteStorageDocker) "create storages for 
files" in { val payload = diskFieldsJson deepMerge json"""{"capacity": 320, "maxFileSize": 300, "volume": "$path"}""" storages.create(diskId, projectRef, payload).accepted + storages.create(diskId, projectRefOrg2, payload).accepted val payload2 = json"""{"@type": "RemoteDiskStorage", "endpoint": "${docker.hostConfig.endpoint}", "folder": "${RemoteStorageDocker.BucketName}", "readPermission": "$otherRead", "writePermission": "$otherWrite", "maxFileSize": 300, "default": false}""" storages.create(remoteId, projectRef, payload2).accepted + storages.create(remoteId, projectRefOrg2, payload2).accepted } "succeed with the id passed" in { @@ -437,6 +440,56 @@ class FilesSpec(docker: RemoteStorageDocker) } } + "copying a file" should { + + "succeed from disk storage based on a tag" in { + val newFileId = genString() + val destination = CopyFileDestination(projectRefOrg2, Some(FileId(newFileId, projectRefOrg2)), None, None, None) + val expectedFilename = "myfile.txt" + val expectedAttr = attributes(filename = expectedFilename, projRef = projectRefOrg2) + val expected = mkResource(nxv + newFileId, projectRefOrg2, diskRev, expectedAttr) + + val actual = files.copyTo(FileId("file1", tag, projectRef), destination).accepted + actual shouldEqual expected + + val fetched = files.fetch(FileId(newFileId, projectRefOrg2)).accepted + fetched shouldEqual expected + } + + "succeed from disk storage based on a rev and should tag the new file" in { + val (newFileId, newTag) = (genString(), UserTag.unsafe(genString())) + val destination = + CopyFileDestination(projectRefOrg2, Some(FileId(newFileId, projectRefOrg2)), None, Some(newTag), None) + val expectedFilename = "file.txt" + val expectedAttr = attributes(filename = expectedFilename, projRef = projectRefOrg2) + val expected = mkResource(nxv + newFileId, projectRefOrg2, diskRev, expectedAttr, tags = Tags(newTag -> 1)) + + val actual = files.copyTo(FileId("file1", 2, projectRef), destination).accepted + actual shouldEqual expected + + val 
fetchedByTag = files.fetch(FileId(newFileId, newTag, projectRefOrg2)).accepted + fetchedByTag shouldEqual expected + } + + "reject if the source file doesn't exist" in { + val destination = CopyFileDestination(projectRefOrg2, None, None, None, None) + files.copyTo(fileIdIri(nxv + "other"), destination).rejectedWith[FileNotFound] + } + + "reject if the destination storage doesn't exist" in { + val destination = CopyFileDestination(projectRefOrg2, None, Some(storage), None, None) + files.copyTo(fileId("file1"), destination).rejected shouldEqual + WrappedStorageRejection(StorageNotFound(storageIri, projectRefOrg2)) + } + + "reject if copying between different storage types" in { + val expectedError = DifferentStorageType(remoteIdIri, StorageType.RemoteDiskStorage, StorageType.DiskStorage) + val destination = CopyFileDestination(projectRefOrg2, None, Some(remoteId), None, None) + files.copyTo(FileId("file1", projectRef), destination).rejected shouldEqual + WrappedStorageRejection(expectedError) + } + } + "deleting a tag" should { "succeed" in { val expected = mkResource(file1, projectRef, diskRev, attributes(), rev = 4) diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala index 39ceb150b1..9dc3bb0904 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala @@ -39,12 +39,15 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, Authent import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef, ResourceRef} import ch.epfl.bluebrain.nexus.testkit.TestHelpers.jsonContentOf -import 
ch.epfl.bluebrain.nexus.testkit.bio.IOFromMap +import ch.epfl.bluebrain.nexus.testkit.ce.IOFromMap import ch.epfl.bluebrain.nexus.testkit.errors.files.FileErrors.{fileAlreadyExistsError, fileIsNotDeprecatedError} import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsIOValues import io.circe.Json +import io.circe.syntax.KeyOps import org.scalatest._ +import java.util.UUID + class FilesRoutesSpec extends BaseRouteSpec with CancelAfterFailure @@ -87,7 +90,7 @@ class FilesRoutesSpec private val asWriter = addCredentials(OAuth2BearerToken("writer")) private val asS3Writer = addCredentials(OAuth2BearerToken("s3writer")) - private val fetchContext = FetchContextDummy(Map(project.ref -> project.context)) + private val fetchContext = FetchContextDummy(Map(project.ref -> project.context, project2.ref -> project2.context)) private val s3Read = Permission.unsafe("s3/read") private val s3Write = Permission.unsafe("s3/write") @@ -113,7 +116,7 @@ class FilesRoutesSpec private val aclCheck = AclSimpleCheck().accepted - lazy val storages: Storages = Storages( + lazy val storages: Storages = Storages( fetchContext.mapRejection(StorageRejection.ProjectContextRejection), ResolverContextResolution(rcr), IO.pure(allowedPerms.toSet), @@ -122,7 +125,7 @@ class FilesRoutesSpec StoragesConfig(eventLogConfig, pagination, stCfg), ServiceAccount(User("nexus-sa", Label.unsafe("sa"))) ).accepted - lazy val files: Files = + lazy val files: Files = Files( fetchContext.mapRejection(FileRejection.ProjectContextRejection), aclCheck, @@ -133,8 +136,12 @@ class FilesRoutesSpec FilesConfig(eventLogConfig, MediaTypeDetectorConfig.Empty), remoteDiskStorageClient )(ceClock, uuidF, timer, contextShift, typedSystem) - private val groupDirectives = - DeltaSchemeDirectives(fetchContext, ioFromMap(uuid -> projectRef.organization), ioFromMap(uuid -> projectRef)) + private val groupDirectives = + DeltaSchemeDirectives( + fetchContext, + ioFromMap(uuid -> projectRef.organization, uuidOrg2 -> 
projectRefOrg2.organization), + ioFromMap(uuid -> projectRef, uuidOrg2 -> projectRefOrg2) + ) private lazy val routes = routesWithIdentities(identities) private def routesWithIdentities(identities: Identities) = @@ -163,6 +170,12 @@ class FilesRoutesSpec .create(dId, projectRef, diskFieldsJson deepMerge defaults deepMerge json"""{"capacity":5000}""")(callerWriter) .void .accepted + storages + .create(dId, projectRefOrg2, diskFieldsJson deepMerge defaults deepMerge json"""{"capacity":5000}""")( + callerWriter + ) + .void + .accepted } "File routes" should { @@ -349,6 +362,56 @@ class FilesRoutesSpec } } + "copy a file" in { + givenAFileInProject(projectRef.toString) { oldFileId => + val newFileId = genString() + val json = Json.obj("source" := Json.obj("projectRef" := projectRef, "fileId" := oldFileId)) + + Put(s"/v1/files/${projectRefOrg2.toString}/$newFileId", json.toEntity) ~> asWriter ~> routes ~> check { + status shouldEqual StatusCodes.Created + val expectedId = project2.base.iri / newFileId + val attr = attributes(filename = oldFileId) + response.asJson shouldEqual fileMetadata(projectRefOrg2, expectedId, attr, diskIdRev) + } + } + } + + "copy a file with generated new Id" in { + val fileCopyUUId = UUID.randomUUID() + withUUIDF(fileCopyUUId) { + givenAFileInProject(projectRef.toString) { oldFileId => + val json = Json.obj("source" := Json.obj("projectRef" := projectRef, "fileId" := oldFileId)) + + Post(s"/v1/files/${projectRefOrg2.toString}/", json.toEntity) ~> asWriter ~> routes ~> check { + status shouldEqual StatusCodes.Created + val expectedId = project2.base.iri / fileCopyUUId.toString + val attr = attributes(filename = oldFileId, id = fileCopyUUId) + response.asJson shouldEqual fileMetadata(projectRefOrg2, expectedId, attr, diskIdRev) + } + } + } + } + + "reject file copy request if tag and rev and present simultaneously" in { + givenAFileInProject(projectRef.toString) { oldFileId => + val source = Json.obj("projectRef" := projectRef, "fileId" := 
oldFileId, "tag" := "mytag", "rev" := 3) + val json = Json.obj("source" := source) + + val requests = List( + Put(s"/v1/files/${projectRefOrg2.toString}/${genString()}", json.toEntity), + Post(s"/v1/files/${projectRefOrg2.toString}/", json.toEntity) + ) + + forAll(requests) { req => + req ~> asWriter ~> routes ~> check { + status shouldEqual StatusCodes.BadRequest + response.asJson shouldEqual + jsonContentOf("/errors/tag-and-rev-copy-error.json", "fileId" -> oldFileId) + } + } + } + } + "deprecate a file" in { givenAFile { id => Delete(s"/v1/files/org/proj/$id?rev=1") ~> asWriter ~> routes ~> check { @@ -629,9 +692,11 @@ class FilesRoutesSpec } } - def givenAFile(test: String => Assertion): Assertion = { + def givenAFile(test: String => Assertion): Assertion = givenAFileInProject("org/proj")(test) + + def givenAFileInProject(projRef: String)(test: String => Assertion): Assertion = { val id = genString() - Put(s"/v1/files/org/proj/$id", entity(s"$id")) ~> asWriter ~> routes ~> check { + Put(s"/v1/files/$projRef/$id", entity(s"$id")) ~> asWriter ~> routes ~> check { status shouldEqual StatusCodes.Created } test(id) diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/routes/StoragesRoutesSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/routes/StoragesRoutesSpec.scala index 4f6d17cd1e..20f2a3c5a5 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/routes/StoragesRoutesSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/routes/StoragesRoutesSpec.scala @@ -32,7 +32,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverContextResolution import ch.epfl.bluebrain.nexus.delta.sdk.utils.BaseRouteSpec import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, Authenticated, Group, Subject, User} import 
ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} -import ch.epfl.bluebrain.nexus.testkit.bio.IOFromMap +import ch.epfl.bluebrain.nexus.testkit.ce.IOFromMap import io.circe.Json import java.util.UUID diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/DeltaSchemeDirectives.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/DeltaSchemeDirectives.scala index a825c210f4..20cb51919c 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/DeltaSchemeDirectives.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/DeltaSchemeDirectives.scala @@ -3,6 +3,7 @@ package ch.epfl.bluebrain.nexus.delta.sdk.directives import akka.http.scaladsl.model.Uri.Path./ import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.{Directive0, Directive1} +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.sdk.directives.UriDirectives._ import ch.epfl.bluebrain.nexus.delta.sdk.marshalling.QueryParamsUnmarshalling @@ -11,8 +12,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ProjectContext import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ProjectRejection.ProjectNotFound import ch.epfl.bluebrain.nexus.delta.sdk.projects.{FetchContext, UUIDCache} import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} -import monix.bio.{IO, UIO} -import monix.execution.Scheduler import java.util.UUID import scala.util.Try @@ -27,11 +26,10 @@ import scala.util.Try * fetch a project by its uuid */ final class DeltaSchemeDirectives( - fetchContext: ProjectRef => IO[_, ProjectContext], - fetchOrgByUuid: UUID => UIO[Option[Label]], - fetchProjByUuid: UUID => UIO[Option[ProjectRef]] -)(implicit s: Scheduler) - extends QueryParamsUnmarshalling { + fetchContext: ProjectRef => IO[ProjectContext], + fetchOrgByUuid: UUID => IO[Option[Label]], + fetchProjByUuid: UUID => 
IO[Option[ProjectRef]] +) extends QueryParamsUnmarshalling { /** * Extracts the organization segment and converts it to UUID. If the conversion is possible, it attempts to fetch the @@ -41,7 +39,7 @@ final class DeltaSchemeDirectives( pathPrefix(Segment).flatMap { segment => Try(UUID.fromString(segment)) .map(uuid => - onSuccess(fetchOrgByUuid(uuid).attempt.runToFuture).flatMap { + onSuccess(fetchOrgByUuid(uuid).attempt.unsafeToFuture()).flatMap { case Right(Some(label)) => provide(label) case _ => label(segment) } @@ -62,7 +60,7 @@ final class DeltaSchemeDirectives( } yield ProjectRef(org, proj) def projectFromUuids: Directive1[ProjectRef] = (uuid & uuid).tflatMap { case (oUuid, pUuid) => - onSuccess(fetchProjByUuid(pUuid).attempt.runToFuture).flatMap { + onSuccess(fetchProjByUuid(pUuid).attempt.unsafeToFuture()).flatMap { case Right(Some(project)) => provide(project) case _ => projectRefFromString(oUuid.toString, pUuid.toString) } @@ -72,7 +70,7 @@ final class DeltaSchemeDirectives( } def projectContext(projectRef: ProjectRef): Directive1[ProjectContext] = - onSuccess(fetchContext(projectRef).attempt.runToFuture).flatMap { + onSuccess(fetchContext(projectRef).attempt.unsafeToFuture()).flatMap { case Right(pc) => provide(pc) case Left(_) => reject() } @@ -83,7 +81,7 @@ final class DeltaSchemeDirectives( */ def iriSegment(projectRef: ProjectRef): Directive1[Iri] = idSegment.flatMap { idSegment => - onSuccess(fetchContext(projectRef).attempt.runToFuture).flatMap { + onSuccess(fetchContext(projectRef).attempt.unsafeToFuture()).flatMap { case Right(pc) => idSegment.toIri(pc.apiMappings, pc.base).map(provide).getOrElse(reject()) case Left(_) => reject() } @@ -127,7 +125,7 @@ final class DeltaSchemeDirectives( * Extract the ''type'' query parameter(s) as Iri */ def types(implicit projectRef: ProjectRef): Directive1[Set[Iri]] = - onSuccess(fetchContext(projectRef).attempt.runToFuture).flatMap { + onSuccess(fetchContext(projectRef).attempt.unsafeToFuture()).flatMap { case 
Right(projectContext) => implicit val pc: ProjectContext = projectContext parameter("type".as[IriVocab].*).map(_.toSet.map((iriVocab: IriVocab) => iriVocab.value)) @@ -137,31 +135,31 @@ final class DeltaSchemeDirectives( object DeltaSchemeDirectives extends QueryParamsUnmarshalling { - def empty(implicit s: Scheduler): DeltaSchemeDirectives = onlyResolveOrgUuid(_ => UIO.none) + def empty: DeltaSchemeDirectives = onlyResolveOrgUuid(_ => IO.none) - def onlyResolveOrgUuid(fetchOrgByUuid: UUID => UIO[Option[Label]])(implicit s: Scheduler) = new DeltaSchemeDirectives( + def onlyResolveOrgUuid(fetchOrgByUuid: UUID => IO[Option[Label]]) = new DeltaSchemeDirectives( (ref: ProjectRef) => IO.raiseError(ProjectNotFound(ref)), fetchOrgByUuid, - _ => UIO.none + _ => IO.none ) - def onlyResolveProjUuid(fetchProjByUuid: UUID => UIO[Option[ProjectRef]])(implicit s: Scheduler) = + def onlyResolveProjUuid(fetchProjByUuid: UUID => IO[Option[ProjectRef]]) = new DeltaSchemeDirectives( (ref: ProjectRef) => IO.raiseError(ProjectNotFound(ref)), - _ => UIO.none, + _ => IO.none, fetchProjByUuid ) - def apply(fetchContext: FetchContext[_], uuidCache: UUIDCache)(implicit s: Scheduler): DeltaSchemeDirectives = + def apply(fetchContext: FetchContext[_], uuidCache: UUIDCache): DeltaSchemeDirectives = apply(fetchContext, uuidCache.orgLabel, uuidCache.projectRef) - def apply(fetchContext: FetchContext[_])(implicit s: Scheduler): DeltaSchemeDirectives = - new DeltaSchemeDirectives((ref: ProjectRef) => fetchContext.onRead(ref), _ => UIO.none, _ => UIO.none) + def apply(fetchContext: FetchContext[_]): DeltaSchemeDirectives = + new DeltaSchemeDirectives((ref: ProjectRef) => fetchContext.onRead(ref), _ => IO.none, _ => IO.none) def apply( fetchContext: FetchContext[_], - fetchOrgByUuid: UUID => UIO[Option[Label]], - fetchProjByUuid: UUID => UIO[Option[ProjectRef]] - )(implicit s: Scheduler): DeltaSchemeDirectives = + fetchOrgByUuid: UUID => IO[Option[Label]], + fetchProjByUuid: UUID => 
IO[Option[ProjectRef]] + ): DeltaSchemeDirectives = new DeltaSchemeDirectives((ref: ProjectRef) => fetchContext.onRead(ref), fetchOrgByUuid, fetchProjByUuid) } diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClient.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClient.scala index 9092b8419b..8ac97cb9cb 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClient.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClient.scala @@ -9,14 +9,15 @@ import akka.http.scaladsl.model.{HttpRequest, HttpResponse} import akka.http.scaladsl.unmarshalling.FromEntityUnmarshaller import akka.stream.StreamTcpException import akka.util.ByteString -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration.toMonixBIOOps +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration.{toCatsIOOps, toMonixBIOOps} import ch.epfl.bluebrain.nexus.delta.sdk.AkkaSource import ch.epfl.bluebrain.nexus.delta.sdk.circe.CirceUnmarshalling._ import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClient.HttpResult import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClientError._ import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ import io.circe.{Decoder, Json} -import monix.bio.{IO, Task, UIO} +import monix.bio.{IO => BIO, Task, UIO} +import cats.effect.IO import monix.execution.Scheduler import java.net.UnknownHostException @@ -34,13 +35,13 @@ trait HttpClient { */ def apply[A](req: HttpRequest)(handleResponse: PartialFunction[HttpResponse, HttpResult[A]]): HttpResult[A] - def run[A](req: HttpRequest)(handleResponse: PartialFunction[HttpResponse, cats.effect.IO[A]]): HttpResult[A] = - apply(req) { case r if handleResponse.isDefinedAt(r) => handleResponse(r).toBIO[HttpClientError] } + def run[A](req: HttpRequest)(handleResponse: PartialFunction[HttpResponse, IO[A]]): IO[A] = + apply(req) { case r if handleResponse.isDefinedAt(r) => handleResponse(r).toBIO[HttpClientError] }.toCatsIO /** * 
Execute the argument request and unmarshal the response Json response. */ - def toJson(req: HttpRequest): HttpResult[Json] = + def toJson(req: HttpRequest): HttpResult[Json] = fromJsonTo[Json](req) /** @@ -69,7 +70,7 @@ trait HttpClient { object HttpClient { - type HttpResult[A] = IO[HttpClientError, A] + type HttpResult[A] = BIO[HttpClientError, A] private val acceptEncoding = AcceptEncoding.create(HttpEncodingRange.create(HttpEncodings.gzip), HttpEncodingRange.create(HttpEncodings.deflate)) @@ -103,12 +104,12 @@ object HttpClient { )(implicit httpConfig: HttpClientConfig, as: ActorSystem, scheduler: Scheduler): HttpClient = new HttpClient { - private def decodeResponse(req: HttpRequest, response: HttpResponse): IO[InvalidEncoding, HttpResponse] = { + private def decodeResponse(req: HttpRequest, response: HttpResponse): BIO[InvalidEncoding, HttpResponse] = { val decoder = response.encoding match { - case HttpEncodings.gzip => IO.pure(Coders.Gzip) - case HttpEncodings.deflate => IO.pure(Coders.Deflate) - case HttpEncodings.identity => IO.pure(Coders.NoCoding) - case encoding => IO.raiseError(InvalidEncoding(req, encoding)) + case HttpEncodings.gzip => BIO.pure(Coders.Gzip) + case HttpEncodings.deflate => BIO.pure(Coders.Deflate) + case HttpEncodings.identity => BIO.pure(Coders.NoCoding) + case encoding => BIO.raiseError(InvalidEncoding(req, encoding)) } decoder.map(_.decodeMessage(response)) } @@ -154,7 +155,7 @@ object HttpClient { override def discardBytes[A](req: HttpRequest, returnValue: => A): HttpResult[A] = apply(req) { case resp if resp.status.isSuccess() => - UIO.delay(resp.discardEntityBytes()) >> IO.pure(returnValue) + UIO.delay(resp.discardEntityBytes()) >> BIO.pure(returnValue) } private def consumeEntity[A](req: HttpRequest, resp: HttpResponse): HttpResult[A] = @@ -163,8 +164,8 @@ object HttpClient { resp.entity.dataBytes.runFold(ByteString(""))(_ ++ _).map(_.utf8String) ) .redeemWith( - error => IO.raiseError(HttpUnexpectedError(req, 
error.getMessage)), - consumedString => IO.raiseError(HttpClientError(req, resp.status, consumedString)) + error => BIO.raiseError(HttpUnexpectedError(req, error.getMessage)), + consumedString => BIO.raiseError(HttpClientError(req, resp.status, consumedString)) ) } diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/jsonld/ExpandIri.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/jsonld/ExpandIri.scala index b064b38b45..8c15d99bd1 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/jsonld/ExpandIri.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/jsonld/ExpandIri.scala @@ -1,12 +1,13 @@ package ch.epfl.bluebrain.nexus.delta.sdk.jsonld +import cats.effect.IO +import ch.epfl.bluebrain.nexus.delta.kernel.error.Rejection import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.sdk.model.{IdSegment, IdSegmentRef} import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ProjectContext import ch.epfl.bluebrain.nexus.delta.sourcing.model.ResourceRef -import monix.bio.IO -final class ExpandIri[R](val onError: String => R) extends AnyVal { +final class ExpandIri[R <: Rejection](val onError: String => R) extends AnyVal { /** * Expand the given segment to an Iri using the provided project if necessary @@ -15,7 +16,7 @@ final class ExpandIri[R](val onError: String => R) extends AnyVal { * @param projectContext * the project context */ - def apply(segment: IdSegment, projectContext: ProjectContext): IO[R, Iri] = + def apply(segment: IdSegment, projectContext: ProjectContext): IO[Iri] = apply(IdSegmentRef(segment), projectContext).map(_.iri) /** @@ -27,7 +28,7 @@ final class ExpandIri[R](val onError: String => R) extends AnyVal { * @param projectContext * the project context */ - def apply(segment: IdSegmentRef, projectContext: ProjectContext): IO[R, ResourceRef] = + def apply(segment: IdSegmentRef, projectContext: ProjectContext): IO[ResourceRef] = IO.fromOption( 
segment.value.toIri(projectContext.apiMappings, projectContext.base).map { iri => segment match { @@ -35,7 +36,6 @@ final class ExpandIri[R](val onError: String => R) extends AnyVal { case IdSegmentRef.Revision(_, rev) => ResourceRef.Revision(iri, rev) case IdSegmentRef.Tag(_, tag) => ResourceRef.Tag(iri, tag) } - }, - onError(segment.value.asString) - ) + } + )(onError(segment.value.asString)) } diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/FetchContext.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/FetchContext.scala index db4d4de748..2d4de5d78e 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/FetchContext.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/FetchContext.scala @@ -1,13 +1,12 @@ package ch.epfl.bluebrain.nexus.delta.sdk.projects import akka.http.scaladsl.model.{HttpHeader, StatusCode} +import cats.effect.IO import cats.syntax.all._ -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.sdk.ProjectResource import ch.epfl.bluebrain.nexus.delta.sdk.error.SDKError import ch.epfl.bluebrain.nexus.delta.sdk.marshalling.HttpResponseFields -import ch.epfl.bluebrain.nexus.delta.sdk.organizations.Organizations -import ch.epfl.bluebrain.nexus.delta.sdk.organizations.model.OrganizationRejection +import ch.epfl.bluebrain.nexus.delta.sdk.organizations.{model, Organizations} import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ProjectRejection._ import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.{ApiMappings, ProjectContext, ProjectRejection} import ch.epfl.bluebrain.nexus.delta.sdk.quotas.Quotas @@ -15,14 +14,13 @@ import ch.epfl.bluebrain.nexus.delta.sdk.quotas.model.QuotaRejection import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Subject import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} import io.circe.{Encoder, JsonObject} -import monix.bio.{IO, UIO} 
-import scala.collection.concurrent +import scala.reflect.ClassTag /** * Define the rules to fetch project context for read and write operations */ -trait FetchContext[R] { self => +abstract class FetchContext[R <: Throwable: ClassTag] { self => /** * The default api mappings @@ -34,7 +32,7 @@ trait FetchContext[R] { self => * @param ref * the project to fetch the context from */ - def onRead(ref: ProjectRef): IO[R, ProjectContext] + def onRead(ref: ProjectRef): IO[ProjectContext] /** * Fetch context for a create operation @@ -43,7 +41,7 @@ trait FetchContext[R] { self => * @param subject * the current user */ - def onCreate(ref: ProjectRef)(implicit subject: Subject): IO[R, ProjectContext] + def onCreate(ref: ProjectRef)(implicit subject: Subject): IO[ProjectContext] /** * Fetch context for a modify operation @@ -52,43 +50,30 @@ trait FetchContext[R] { self => * @param subject * the current user */ - def onModify(ref: ProjectRef)(implicit subject: Subject): IO[R, ProjectContext] - - /** - * Cache onRead operations to avoid unnecessary queries during batch operations like listings - * @return - * a new instance caching onRead calls - */ - def cacheOnReads: FetchContext[R] = new FetchContext[R] { - - private val cache: concurrent.Map[ProjectRef, ProjectContext] = new concurrent.TrieMap - - override def defaultApiMappings: ApiMappings = self.defaultApiMappings - - override def onRead(ref: ProjectRef): IO[R, ProjectContext] = - IO.fromOption(cache.get(ref)).onErrorFallbackTo(self.onRead(ref).tapEval { pc => UIO.delay(cache.put(ref, pc)) }) - - override def onCreate(ref: ProjectRef)(implicit subject: Subject): IO[R, ProjectContext] = self.onCreate(ref) - - override def onModify(ref: ProjectRef)(implicit subject: Subject): IO[R, ProjectContext] = self.onCreate(ref) - } + def onModify(ref: ProjectRef)(implicit subject: Subject): IO[ProjectContext] /** * Map the rejection to another one * @param f * the function from [[R]] to [[R2]] */ - def mapRejection[R2](f: R => R2): 
FetchContext[R2] = + def mapRejection[R2 <: Throwable: ClassTag](f: R => R2): FetchContext[R2] = new FetchContext[R2] { override def defaultApiMappings: ApiMappings = self.defaultApiMappings - override def onRead(ref: ProjectRef): IO[R2, ProjectContext] = self.onRead(ref).mapError(f) + override def onRead(ref: ProjectRef): IO[ProjectContext] = self.onRead(ref).adaptError { case r: R => + f(r) + } - override def onCreate(ref: ProjectRef)(implicit subject: Subject): IO[R2, ProjectContext] = - self.onCreate(ref).mapError(f) + override def onCreate(ref: ProjectRef)(implicit subject: Subject): IO[ProjectContext] = + self.onCreate(ref).adaptError { case r: R => + f(r) + } - override def onModify(ref: ProjectRef)(implicit subject: Subject): IO[R2, ProjectContext] = - self.onModify(ref).mapError(f) + override def onModify(ref: ProjectRef)(implicit subject: Subject): IO[ProjectContext] = + self.onModify(ref).adaptError { case r: R => + f(r) + } } } @@ -103,7 +88,7 @@ object FetchContext { /** * The underlying rejection type */ - type E + type E <: Throwable /** * The underlying rejection value @@ -129,17 +114,18 @@ object FetchContext { object ContextRejection { - type Aux[E0] = ContextRejection { type E = E0 } + type Aux[E0 <: Throwable] = ContextRejection { type E = E0 } - def apply[E0: Encoder.AsObject: HttpResponseFields](v: E0): ContextRejection.Aux[E0] = new ContextRejection { - override type E = E0 + def apply[E0 <: Throwable: Encoder.AsObject: HttpResponseFields](v: E0): ContextRejection.Aux[E0] = + new ContextRejection { + override type E = E0 - override def value: E = v + override def value: E = v - override def encoder: Encoder.AsObject[E] = implicitly[Encoder.AsObject[E]] + override def encoder: Encoder.AsObject[E] = implicitly[Encoder.AsObject[E]] - override def responseFields: HttpResponseFields[E] = implicitly[HttpResponseFields[E]] - } + override def responseFields: HttpResponseFields[E] = implicitly[HttpResponseFields[E]] + } } /** @@ -147,51 +133,50 @@ 
object FetchContext { */ def apply(organizations: Organizations, projects: Projects, quotas: Quotas): FetchContext[ContextRejection] = apply( - organizations.fetchActiveOrganization(_).void.toBIO[OrganizationRejection], + organizations.fetchActiveOrganization(_).void, projects.defaultApiMappings, - projects.fetch(_).toBIO[ProjectRejection], + projects.fetch, quotas ) def apply( - fetchActiveOrganization: Label => IO[OrganizationRejection, Unit], + fetchActiveOrganization: Label => IO[Unit], dam: ApiMappings, - fetchProject: ProjectRef => IO[ProjectRejection, ProjectResource], + fetchProject: ProjectRef => IO[ProjectResource], quotas: Quotas ): FetchContext[ContextRejection] = new FetchContext[ContextRejection] { override def defaultApiMappings: ApiMappings = dam - override def onRead(ref: ProjectRef): IO[ContextRejection, ProjectContext] = - fetchProject(ref) - .tapEval { p => - IO.raiseWhen(p.value.markedForDeletion)(ProjectIsMarkedForDeletion(ref)) - } - .bimap(ContextRejection(_), _.value.context) + override def onRead(ref: ProjectRef): IO[ProjectContext] = + fetchProject(ref).attemptNarrow[ProjectRejection].flatMap { + case Left(rejection) => IO.raiseError(ContextRejection(rejection)) + case Right(project) if project.value.markedForDeletion => IO.raiseError(ProjectIsMarkedForDeletion(ref)) + case Right(project) => IO.pure(project.value.context) + } private def onWrite(ref: ProjectRef) = - fetchProject(ref) - .tapEval { p => - IO.raiseWhen(p.value.markedForDeletion)(ProjectIsMarkedForDeletion(ref)) >> - IO.raiseWhen(p.deprecated)(ProjectIsDeprecated(ref)) - } - .bimap(ContextRejection(_), _.value.context) - - override def onCreate(ref: ProjectRef)(implicit subject: Subject): IO[ContextRejection, ProjectContext] = + fetchProject(ref).attemptNarrow[ProjectRejection].flatMap { + case Left(rejection) => IO.raiseError(ContextRejection(rejection)) + case Right(project) if project.value.markedForDeletion => IO.raiseError(ProjectIsMarkedForDeletion(ref)) + case 
Right(project) if project.deprecated => IO.raiseError(ProjectIsDeprecated(ref)) + case Right(project) => IO.pure(project.value.context) + } + + override def onCreate(ref: ProjectRef)(implicit subject: Subject): IO[ProjectContext] = quotas .reachedForResources(ref, subject) - .adaptError { case e: QuotaRejection => ContextRejection(e) } - .toBIO[ContextRejection] >> - onModify(ref) + .adaptError { case e: QuotaRejection => ContextRejection(e) } >> onModify(ref) - override def onModify(ref: ProjectRef)(implicit subject: Subject): IO[ContextRejection, ProjectContext] = + override def onModify(ref: ProjectRef)(implicit subject: Subject): IO[ProjectContext] = for { - _ <- fetchActiveOrganization(ref.organization).mapError(ContextRejection.apply(_)) + _ <- fetchActiveOrganization(ref.organization).adaptError { case rejection: model.OrganizationRejection => + ContextRejection(rejection) + } _ <- quotas .reachedForEvents(ref, subject) .adaptError { case e: QuotaRejection => ContextRejection(e) } - .toBIO[ContextRejection] context <- onWrite(ref) } yield context } diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/UUIDCache.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/UUIDCache.scala index ea738b50d5..fbd2947ba1 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/UUIDCache.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/UUIDCache.scala @@ -1,11 +1,11 @@ package ch.epfl.bluebrain.nexus.delta.sdk.projects -import ch.epfl.bluebrain.nexus.delta.kernel.cache.{CacheConfig, KeyValueStore} +import cats.effect.IO +import ch.epfl.bluebrain.nexus.delta.kernel.cache.{CacheConfig, LocalCache} import ch.epfl.bluebrain.nexus.delta.sdk.organizations.Organizations import ch.epfl.bluebrain.nexus.delta.sourcing.Transactors -import doobie.implicits._ import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} -import monix.bio.UIO +import doobie.implicits._ 
import java.util.UUID @@ -17,41 +17,39 @@ trait UUIDCache { /** * Fetches an org label from an uuid */ - def orgLabel(uuid: UUID): UIO[Option[Label]] + def orgLabel(uuid: UUID): IO[Option[Label]] /** * Fetches a project reference from an uuid */ - def projectRef(uuid: UUID): UIO[Option[ProjectRef]] + def projectRef(uuid: UUID): IO[Option[ProjectRef]] } object UUIDCache { - def apply(projectsConfig: CacheConfig, orgsConfig: CacheConfig, xas: Transactors): UIO[UUIDCache] = + def apply(projectsConfig: CacheConfig, orgsConfig: CacheConfig, xas: Transactors): IO[UUIDCache] = for { - orgsCache <- KeyValueStore.localLRU[UUID, Label](orgsConfig) - projectsCache <- KeyValueStore.localLRU[UUID, ProjectRef](projectsConfig) + orgsCache <- LocalCache.lru[UUID, Label](orgsConfig) + projectsCache <- LocalCache.lru[UUID, ProjectRef](projectsConfig) } yield new UUIDCache { - override def orgLabel(uuid: UUID): UIO[Option[Label]] = + override def orgLabel(uuid: UUID): IO[Option[Label]] = orgsCache.getOrElseAttemptUpdate( uuid, sql"SELECT value->>'label' FROM global_states WHERE type = ${Organizations.entityType} AND value->>'uuid' = ${uuid.toString} " .query[Label] .option - .transact(xas.read) - .hideErrors + .transact(xas.readCE) ) - override def projectRef(uuid: UUID): UIO[Option[ProjectRef]] = + override def projectRef(uuid: UUID): IO[Option[ProjectRef]] = projectsCache.getOrElseAttemptUpdate( uuid, sql"SELECT org, project FROM scoped_states WHERE type = ${Projects.entityType} AND value->>'uuid' = ${uuid.toString} " .query[(Label, Label)] .map { case (o, p) => ProjectRef(o, p) } .option - .transact(xas.read) - .hideErrors + .transact(xas.readCE) ) } diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/MultiResolution.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/MultiResolution.scala index 5ddbd3f597..93967b30e3 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/MultiResolution.scala +++ 
b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/MultiResolution.scala @@ -4,7 +4,6 @@ import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.jsonld.{ExpandIri, JsonLdContent} import ch.epfl.bluebrain.nexus.delta.sdk.model.{IdSegment, IdSegmentRef} -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContext import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ProjectContext import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverRejection.{InvalidResolution, InvalidResolvedResourceId, InvalidResolverResolution} @@ -40,7 +39,7 @@ final class MultiResolution( )(implicit caller: Caller): IO[MultiResolutionResult[ResourceResolutionReport]] = for { project <- fetchProject(projectRef) - resourceRef <- toCatsIO(expandResourceIri(resourceSegment, project)) + resourceRef <- expandResourceIri(resourceSegment, project) result <- resourceResolution.resolveReport(resourceRef, projectRef).flatMap { case (resourceReport, Some(resourceResult)) => IO.pure(MultiResolutionResult(resourceReport, resourceResult)) @@ -66,8 +65,8 @@ final class MultiResolution( for { project <- fetchProject(projectRef) - resourceRef <- toCatsIO(expandResourceIri(resourceSegment, project)) - resolverId <- toCatsIO(Resolvers.expandIri(resolverSegment, project)) + resourceRef <- expandResourceIri(resourceSegment, project) + resolverId <- Resolvers.expandIri(resolverSegment, project) result <- resourceResolution.resolveReport(resourceRef, projectRef, resolverId).flatMap { case (resourceReport, Some(resourceResult)) => IO.pure(MultiResolutionResult(resourceReport, resourceResult)) diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversImpl.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversImpl.scala index 3f6ec9fb81..c5cacc7bab 100644 --- 
a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversImpl.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversImpl.scala @@ -42,7 +42,7 @@ final class ResolversImpl private ( source: Json )(implicit caller: Caller): IO[ResolverResource] = { for { - pc <- fetchContext.onCreate(projectRef).toCatsIO + pc <- fetchContext.onCreate(projectRef) (iri, resolverValue) <- sourceDecoder(projectRef, pc, source).toCatsIO res <- eval(CreateResolver(iri, projectRef, resolverValue, source, caller)) } yield res @@ -54,8 +54,8 @@ final class ResolversImpl private ( source: Json )(implicit caller: Caller): IO[ResolverResource] = { for { - pc <- fetchContext.onCreate(projectRef).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onCreate(projectRef) + iri <- expandIri(id, pc) resolverValue <- sourceDecoder(projectRef, pc, iri, source).toCatsIO res <- eval(CreateResolver(iri, projectRef, resolverValue, source, caller)) } yield res @@ -67,8 +67,8 @@ final class ResolversImpl private ( resolverValue: ResolverValue )(implicit caller: Caller): IO[ResolverResource] = { for { - pc <- fetchContext.onCreate(projectRef).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onCreate(projectRef) + iri <- expandIri(id, pc) source = ResolverValue.generateSource(iri, resolverValue) res <- eval(CreateResolver(iri, projectRef, resolverValue, source, caller)) } yield res @@ -81,8 +81,8 @@ final class ResolversImpl private ( source: Json )(implicit caller: Caller): IO[ResolverResource] = { for { - pc <- fetchContext.onModify(projectRef).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onModify(projectRef) + iri <- expandIri(id, pc) resolverValue <- sourceDecoder(projectRef, pc, iri, source).toCatsIO res <- eval(UpdateResolver(iri, projectRef, resolverValue, source, rev, caller)) } yield res @@ -97,8 +97,8 @@ final class ResolversImpl private ( caller: Caller ): IO[ResolverResource] = { for { - pc 
<- fetchContext.onModify(projectRef).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onModify(projectRef) + iri <- expandIri(id, pc) source = ResolverValue.generateSource(iri, resolverValue) res <- eval(UpdateResolver(iri, projectRef, resolverValue, source, rev, caller)) } yield res @@ -114,8 +114,8 @@ final class ResolversImpl private ( subject: Identity.Subject ): IO[ResolverResource] = { for { - pc <- fetchContext.onModify(projectRef).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onModify(projectRef) + iri <- expandIri(id, pc) res <- eval(TagResolver(iri, projectRef, tagRev, tag, rev, subject)) } yield res }.span("tagResolver") @@ -126,16 +126,16 @@ final class ResolversImpl private ( rev: Int )(implicit subject: Identity.Subject): IO[ResolverResource] = { for { - pc <- fetchContext.onModify(projectRef).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onModify(projectRef) + iri <- expandIri(id, pc) res <- eval(DeprecateResolver(iri, projectRef, rev, subject)) } yield res }.span("deprecateResolver") override def fetch(id: IdSegmentRef, projectRef: ProjectRef): IO[ResolverResource] = { for { - pc <- fetchContext.onRead(projectRef).toCatsIO - iri <- expandIri(id.value, pc).toCatsIO + pc <- fetchContext.onRead(projectRef) + iri <- expandIri(id.value, pc) notFound = ResolverNotFound(iri, projectRef) state <- id match { case Latest(_) => log.stateOr(projectRef, iri, notFound) diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ResourcesImpl.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ResourcesImpl.scala index 8d7fbbda12..ece53360dc 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ResourcesImpl.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ResourcesImpl.scala @@ -26,7 +26,6 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Subject import 
ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag import io.circe.Json -import monix.bio.{IO => BIO} final class ResourcesImpl private ( log: ResourcesLog, @@ -43,7 +42,7 @@ final class ResourcesImpl private ( tag: Option[UserTag] )(implicit caller: Caller): IO[DataResource] = { for { - projectContext <- fetchContext.onCreate(projectRef).toCatsIO + projectContext <- fetchContext.onCreate(projectRef) schemeRef <- IO.fromEither(expandResourceRef(schema, projectContext)) jsonld <- sourceParser(projectRef, projectContext, source).toCatsIO res <- eval(CreateResource(jsonld.iri, projectRef, schemeRef, source, jsonld, caller, tag)) @@ -188,11 +187,11 @@ final class ResourcesImpl private ( ): IO[DataResource] = fetchState(id, projectRef, schemaOpt).map(_.toResource) private def expandWithContext( - fetchCtx: ProjectRef => BIO[ProjectContextRejection, ProjectContext], + fetchCtx: ProjectRef => IO[ProjectContext], ref: ProjectRef, id: IdSegment ): IO[(Iri, ProjectContext)] = - fetchCtx(ref).flatMap(pc => expandIri(id, pc).map(_ -> pc)).toCatsIO + fetchCtx(ref).flatMap(pc => expandIri(id, pc).map(_ -> pc)) private def eval(cmd: ResourceCommand): IO[DataResource] = log.evaluate(cmd.project, cmd.id, cmd).map(_._2.toResource) diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ResourcesTrial.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ResourcesTrial.scala index 40cd1d7ed2..c3f70df257 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ResourcesTrial.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ResourcesTrial.scala @@ -84,7 +84,7 @@ object ResourcesTrial { caller: Caller ): IO[ResourceGenerationResult] = { for { - projectContext <- fetchContext.onRead(project).toCatsIO + projectContext <- fetchContext.onRead(project) schemaRef <- IO.fromEither(Resources.expandResourceRef(schema, 
projectContext)) jsonld <- sourceParser(project, projectContext, source.value).toCatsIO validation <- validateResource(jsonld.iri, jsonld.expanded, schemaRef, project, caller) @@ -98,7 +98,7 @@ object ResourcesTrial { caller: Caller ): IO[ResourceGenerationResult] = { for { - projectContext <- fetchContext.onRead(project).toCatsIO + projectContext <- fetchContext.onRead(project) jsonld <- sourceParser(project, projectContext, source.value).toCatsIO validation <- validateResource(jsonld.iri, jsonld.expanded, schema) result <- toResourceF(project, jsonld, source, validation) @@ -111,7 +111,7 @@ object ResourcesTrial { caller: Caller ): IO[ValidationResult] = { for { - projectContext <- fetchContext.onRead(project).toCatsIO + projectContext <- fetchContext.onRead(project) schemaRefOpt <- IO.fromEither(expandResourceRef(schemaOpt, projectContext)) resource <- fetchResource(id, project) report <- validateResource( diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemasImpl.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemasImpl.scala index 296eaa7012..3e3db17bcc 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemasImpl.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemasImpl.scala @@ -70,8 +70,8 @@ final class SchemasImpl private ( source: Json )(implicit caller: Caller): IO[SchemaResource] = { for { - pc <- fetchContext.onModify(projectRef).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onModify(projectRef) + iri <- expandIri(id, pc) (compacted, expanded) <- sourceParser(projectRef, pc, iri, source).toCatsIO.map { j => (j.compacted, j.expanded) } expandedResolved <- resolveImports(iri, projectRef, expanded).toCatsIO res <- @@ -84,8 +84,8 @@ final class SchemasImpl private ( projectRef: ProjectRef )(implicit caller: Caller): IO[SchemaResource] = { for { - pc <- fetchContext.onModify(projectRef).toCatsIO - iri <- 
expandIri(id, pc).toCatsIO + pc <- fetchContext.onModify(projectRef) + iri <- expandIri(id, pc) schema <- log.stateOr(projectRef, iri, SchemaNotFound(iri, projectRef)) (compacted, expanded) <- sourceParser(projectRef, pc, iri, schema.source).toCatsIO.map { j => (j.compacted, j.expanded) @@ -104,8 +104,8 @@ final class SchemasImpl private ( rev: Int )(implicit caller: Subject): IO[SchemaResource] = { for { - pc <- fetchContext.onModify(projectRef).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onModify(projectRef) + iri <- expandIri(id, pc) res <- eval(TagSchema(iri, projectRef, tagRev, tag, rev, caller)) } yield res }.span("tagSchema") @@ -117,8 +117,8 @@ final class SchemasImpl private ( rev: Int )(implicit caller: Subject): IO[SchemaResource] = (for { - pc <- fetchContext.onModify(projectRef).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onModify(projectRef) + iri <- expandIri(id, pc) res <- eval(DeleteSchemaTag(iri, projectRef, tag, rev, caller)) } yield res).span("deleteSchemaTag") @@ -128,15 +128,15 @@ final class SchemasImpl private ( rev: Int )(implicit caller: Subject): IO[SchemaResource] = (for { - pc <- fetchContext.onModify(projectRef).toCatsIO - iri <- expandIri(id, pc).toCatsIO + pc <- fetchContext.onModify(projectRef) + iri <- expandIri(id, pc) res <- eval(DeprecateSchema(iri, projectRef, rev, caller)) } yield res).span("deprecateSchema") override def fetch(id: IdSegmentRef, projectRef: ProjectRef): IO[SchemaResource] = { for { - pc <- fetchContext.onRead(projectRef).toCatsIO - iri <- expandIri(id.value, pc).toCatsIO + pc <- fetchContext.onRead(projectRef) + iri <- expandIri(id.value, pc) state <- id match { case Latest(_) => log.stateOr(projectRef, iri, SchemaNotFound(iri, projectRef)) case Revision(_, rev) => diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/DeltaSchemeDirectivesSpec.scala 
b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/DeltaSchemeDirectivesSpec.scala index 4c19cd34a6..a7dfff4a9c 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/DeltaSchemeDirectivesSpec.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/DeltaSchemeDirectivesSpec.scala @@ -4,6 +4,7 @@ import akka.http.scaladsl.model.MediaRanges.`*/*` import akka.http.scaladsl.model.headers.Accept import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.kernel.utils.UrlUtils import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.nxv import ch.epfl.bluebrain.nexus.delta.sdk.implicits._ @@ -12,10 +13,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.{ApiMappings, ProjectCon import ch.epfl.bluebrain.nexus.delta.sdk.utils.RouteHelpers import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} import ch.epfl.bluebrain.nexus.testkit.scalatest.TestMatchers -import ch.epfl.bluebrain.nexus.testkit.scalatest.bio.BIOValues import ch.epfl.bluebrain.nexus.testkit.{CirceLiteral, TestHelpers} -import monix.bio.UIO -import monix.execution.Scheduler import org.scalatest.matchers.should.Matchers import org.scalatest.{Inspectors, OptionValues} @@ -27,23 +25,20 @@ class DeltaSchemeDirectivesSpec with OptionValues with CirceLiteral with UriDirectives - with BIOValues with TestMatchers with TestHelpers with Inspectors { - implicit private val sc: Scheduler = Scheduler.global - implicit private val baseUri: BaseUri = BaseUri("http://localhost/base//", Label.unsafe("v1")) private val schemaView = nxv + "schema" private val mappings = ApiMappings("alias" -> (nxv + "alias"), "nxv" -> nxv.base, "view" -> schemaView) private val vocab = iri"http://localhost/vocab/" - private val fetchContext = (_: ProjectRef) => UIO.pure(ProjectContext.unsafe(mappings, nxv.base, vocab)) + private val fetchContext = (_: ProjectRef) => 
IO.pure(ProjectContext.unsafe(mappings, nxv.base, vocab)) - private val fetchOrgByUuid = (_: UUID) => UIO.none - private val fetchProjectByUuid = (_: UUID) => UIO.none + private val fetchOrgByUuid = (_: UUID) => IO.none + private val fetchProjectByUuid = (_: UUID) => IO.none private val schemeDirectives = new DeltaSchemeDirectives(fetchContext, fetchOrgByUuid, fetchProjectByUuid) diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/FetchContextDummy.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/FetchContextDummy.scala index d9855f5b52..d9636042df 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/FetchContextDummy.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/FetchContextDummy.scala @@ -1,10 +1,13 @@ package ch.epfl.bluebrain.nexus.delta.sdk.projects +import cats.effect.IO +import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContext.ContextRejection import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.{ApiMappings, Project, ProjectContext, ProjectRejection} import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ProjectRejection.{ProjectIsDeprecated, ProjectNotFound} import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Identity, ProjectRef} -import monix.bio.IO + +import scala.reflect.ClassTag class FetchContextDummy private ( expected: Map[ProjectRef, ProjectContext], @@ -14,15 +17,15 @@ class FetchContextDummy private ( override def defaultApiMappings: ApiMappings = ApiMappings.empty - override def onRead(ref: ProjectRef): IO[ContextRejection, ProjectContext] = + override def onRead(ref: ProjectRef): IO[ProjectContext] = IO.fromEither(expected.get(ref).toRight(ContextRejection(ProjectNotFound(ref).asInstanceOf[ProjectRejection]))) - override def onCreate(ref: ProjectRef)(implicit subject: Identity.Subject): IO[ContextRejection, ProjectContext] = + override def onCreate(ref: ProjectRef)(implicit subject: 
Identity.Subject): IO[ProjectContext] = IO.raiseWhen(rejectOnCreate.contains(ref))( ContextRejection(ProjectIsDeprecated(ref).asInstanceOf[ProjectRejection]) ) >> onRead(ref) - override def onModify(ref: ProjectRef)(implicit subject: Identity.Subject): IO[ContextRejection, ProjectContext] = + override def onModify(ref: ProjectRef)(implicit subject: Identity.Subject): IO[ProjectContext] = IO.raiseWhen(rejectOnModify.contains(ref))( ContextRejection(ProjectIsDeprecated(ref).asInstanceOf[ProjectRejection]) ) >> onRead(ref) @@ -44,7 +47,7 @@ object FetchContextDummy { def apply(expected: List[Project]): FetchContext[ContextRejection] = new FetchContextDummy(expected.map { p => p.ref -> p.context }.toMap, Set.empty, Set.empty) - def apply[R]( + def apply[R <: Throwable: ClassTag]( expected: Map[ProjectRef, ProjectContext], rejectOnCreate: Set[ProjectRef], rejectOnModify: Set[ProjectRef], @@ -52,17 +55,20 @@ object FetchContextDummy { ): FetchContext[R] = new FetchContextDummy(expected, rejectOnCreate, rejectOnModify).mapRejection(f) - def apply[R]( + def apply[R <: Throwable: ClassTag]( expected: Map[ProjectRef, ProjectContext], rejectOnCreateOrModify: Set[ProjectRef], f: ContextRejection => R ): FetchContext[R] = apply(expected, rejectOnCreateOrModify, rejectOnCreateOrModify, f) - def apply[R](expected: Map[ProjectRef, ProjectContext], f: ContextRejection => R): FetchContext[R] = + def apply[R <: Throwable: ClassTag]( + expected: Map[ProjectRef, ProjectContext], + f: ContextRejection => R + ): FetchContext[R] = apply(expected, Set.empty, f) - def apply[R](projects: List[Project], f: ContextRejection => R): FetchContext[R] = + def apply[R <: Throwable: ClassTag](projects: List[Project], f: ContextRejection => R): FetchContext[R] = apply(projects.map { p => p.ref -> p.context }.toMap, Set.empty, f) } diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/FetchContextSuite.scala 
b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/FetchContextSuite.scala index ecba7061c4..40f1ceae5a 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/FetchContextSuite.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/FetchContextSuite.scala @@ -1,30 +1,30 @@ package ch.epfl.bluebrain.nexus.delta.sdk.projects import cats.effect.IO +import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.sdk.generators.ProjectGen -import ch.epfl.bluebrain.nexus.delta.sdk.organizations.model.OrganizationRejection import ch.epfl.bluebrain.nexus.delta.sdk.organizations.model.OrganizationRejection.{OrganizationIsDeprecated, OrganizationNotFound} +import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContext.ContextRejection +import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ProjectRejection.{ProjectIsDeprecated, ProjectIsMarkedForDeletion, ProjectNotFound} -import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.{ApiMappings, ProjectRejection} import ch.epfl.bluebrain.nexus.delta.sdk.quotas.Quotas +import ch.epfl.bluebrain.nexus.delta.sdk.quotas.model.Quota import ch.epfl.bluebrain.nexus.delta.sdk.quotas.model.QuotaRejection.QuotaReached.{QuotaEventsReached, QuotaResourcesReached} -import ch.epfl.bluebrain.nexus.delta.sdk.quotas.model.{Quota, QuotaRejection} import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Subject import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Identity, Label, ProjectRef} -import ch.epfl.bluebrain.nexus.testkit.mu.bio.BioSuite -import monix.bio.{IO => BIO} +import ch.epfl.bluebrain.nexus.testkit.mu.ce.CatsEffectSuite -class FetchContextSuite extends BioSuite { +class FetchContextSuite extends CatsEffectSuite { implicit private val subject: Subject = Identity.Anonymous private val activeOrg = Label.unsafe("org") private val deprecatedOrg = Label.unsafe("deprecated") - private def 
fetchActiveOrganization(label: Label): BIO[OrganizationRejection, Unit] = label match { - case `activeOrg` => BIO.unit - case `deprecatedOrg` => BIO.raiseError(OrganizationIsDeprecated(deprecatedOrg)) - case _ => BIO.raiseError(OrganizationNotFound(label)) + private def fetchActiveOrganization(label: Label): IO[Unit] = label match { + case `activeOrg` => IO.unit + case `deprecatedOrg` => IO.raiseError(OrganizationIsDeprecated(deprecatedOrg)) + case _ => IO.raiseError(OrganizationNotFound(label)) } private val activeProject = ProjectRef.unsafe("org", "proj") @@ -36,16 +36,16 @@ class FetchContextSuite extends BioSuite { ProjectGen.project(deprecatedProject.organization.value, deprecatedProject.project.value) private def fetchProject(ref: ProjectRef) = ref match { - case `activeProject` => BIO.pure(ProjectGen.resourceFor(activeProjectValue)) + case `activeProject` => IO.pure(ProjectGen.resourceFor(activeProjectValue)) case `deletedProject` => - BIO.pure( + IO.pure( ProjectGen.resourceFor( ProjectGen.project(deletedProject.organization.value, deletedProject.project.value), markedForDeletion = true ) ) - case `deprecatedProject` => BIO.pure(ProjectGen.resourceFor(deprecatedProjectValue, deprecated = true)) - case _ => BIO.raiseError(ProjectNotFound(ref)) + case `deprecatedProject` => IO.pure(ProjectGen.resourceFor(deprecatedProjectValue, deprecated = true)) + case _ => IO.raiseError(ProjectNotFound(ref)) } private def quotas(resources: Boolean, events: Boolean) = new Quotas { @@ -66,78 +66,82 @@ class FetchContextSuite extends BioSuite { ) test("Successfully get a context for an active project on read") { - fetchContext(quotasResources = true, quotasEvents = true).onRead(activeProject).assert(activeProjectValue.context) + fetchContext(quotasResources = true, quotasEvents = true) + .onRead(activeProject) + .assertEquals(activeProjectValue.context) } test("Successfully get a context for a deprecated project on read") { fetchContext(quotasResources = true, quotasEvents = 
true) .onRead(deprecatedProject) - .assert(deprecatedProjectValue.context) + .assertEquals(deprecatedProjectValue.context) } test("Fail getting a context for a project marked as deleted on read") { fetchContext(quotasResources = true, quotasEvents = true) .onRead(deletedProject) - .mapError(_.value.asInstanceOf[ProjectRejection]) - .error(ProjectIsMarkedForDeletion(deletedProject)) + .adaptError { case c: ContextRejection => c.value } + .intercept(ProjectIsMarkedForDeletion(deletedProject)) } test("Successfully get a context for an active project on create if quota is not reached") { - fetchContext(quotasResources = false, quotasEvents = false).onRead(activeProject).assert(activeProjectValue.context) + fetchContext(quotasResources = false, quotasEvents = false) + .onRead(activeProject) + .assertEquals(activeProjectValue.context) } test("Fail getting a context for an active project on create if quota for resources is not reached") { fetchContext(quotasResources = true, quotasEvents = false) .onCreate(activeProject) - .mapError(_.value.asInstanceOf[QuotaRejection]) - .error(QuotaResourcesReached(activeProject, 0)) + .adaptError { case c: ContextRejection => c.value } + .intercept(QuotaResourcesReached(activeProject, 0)) } test("Fail getting a context for a deprecated project on create") { fetchContext(quotasResources = false, quotasEvents = false) .onCreate(deprecatedProject) - .mapError(_.value.asInstanceOf[ProjectRejection]) - .error(ProjectIsDeprecated(deprecatedProject)) + .adaptError { case c: ContextRejection => c.value } + .intercept(ProjectIsDeprecated(deprecatedProject)) } test("Fail getting a context for a project marked as deleted on create") { fetchContext(quotasResources = false, quotasEvents = false) .onCreate(deletedProject) - .mapError(_.value.asInstanceOf[ProjectRejection]) - .error(ProjectIsMarkedForDeletion(deletedProject)) + .adaptError { case c: ContextRejection => c.value } + .intercept(ProjectIsMarkedForDeletion(deletedProject)) } 
test("Successfully get a context for an active project on modify if quotas are not reached") { fetchContext(quotasResources = false, quotasEvents = false) .onModify(activeProject) - .assert(activeProjectValue.context) + .assertEquals(activeProjectValue.context) } test("Successfully get a context for an active project on modify if only resource quota is reached") { fetchContext(quotasResources = true, quotasEvents = false) .onModify(activeProject) - .assert(activeProjectValue.context) + .assertEquals(activeProjectValue.context) } test("Fail getting a context for an active project on create if event quotas is reached") { fetchContext(quotasResources = false, quotasEvents = true) .onModify(activeProject) - .mapError(_.value.asInstanceOf[QuotaRejection]) - .error(QuotaEventsReached(activeProject, 0)) + .adaptError { case c: ContextRejection => c.value } + .intercept(QuotaEventsReached(activeProject, 0)) } test("Fail getting a context for a deprecated project on modify") { fetchContext(quotasResources = false, quotasEvents = false) .onModify(deprecatedProject) - .mapError(_.value.asInstanceOf[ProjectRejection]) - .error(ProjectIsDeprecated(deprecatedProject)) + .adaptError { case c: ContextRejection => c.value } + .intercept(ProjectIsDeprecated(deprecatedProject)) } test("Fail getting a context for a project marked as deleted on modify") { fetchContext(quotasResources = false, quotasEvents = false) .onModify(deletedProject) - .mapError(_.value.asInstanceOf[ProjectRejection]) - .error(ProjectIsMarkedForDeletion(deletedProject)) + .adaptError { case c: ContextRejection => c.value } + .intercept(ProjectIsMarkedForDeletion(deletedProject)) } } diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/UUIDCacheSuite.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/UUIDCacheSuite.scala index ed6faa894c..6c16bfda5a 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/UUIDCacheSuite.scala +++ 
b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/projects/UUIDCacheSuite.scala @@ -8,18 +8,18 @@ import ch.epfl.bluebrain.nexus.delta.sdk.organizations.model.OrganizationState import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ProjectState import ch.epfl.bluebrain.nexus.delta.sourcing.config.QueryConfig import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} +import ch.epfl.bluebrain.nexus.delta.sourcing.postgres.Doobie import ch.epfl.bluebrain.nexus.delta.sourcing.query.RefreshStrategy import ch.epfl.bluebrain.nexus.delta.sourcing.state.{GlobalStateStore, ScopedStateStore} -import ch.epfl.bluebrain.nexus.delta.sourcing.postgres.Doobie import ch.epfl.bluebrain.nexus.testkit.ce.CatsRunContext -import ch.epfl.bluebrain.nexus.testkit.mu.bio.BioSuite +import ch.epfl.bluebrain.nexus.testkit.mu.ce.CatsEffectSuite import doobie.implicits._ import munit.AnyFixture import java.util.UUID import scala.concurrent.duration._ -class UUIDCacheSuite extends BioSuite with CatsRunContext with Doobie.Fixture { +class UUIDCacheSuite extends CatsEffectSuite with CatsRunContext with Doobie.Fixture { override def munitFixtures: Seq[AnyFixture[_]] = List(doobie) @@ -34,7 +34,7 @@ class UUIDCacheSuite extends BioSuite with CatsRunContext with Doobie.Fixture { private lazy val xas = doobie() - private lazy val uuidCache = UUIDCache(cacheConfig, cacheConfig, xas).runSyncUnsafe() + private lazy val uuidCache = UUIDCache(cacheConfig, cacheConfig, xas).unsafeRunSync() private lazy val orgStore = GlobalStateStore(Organizations.entityType, OrganizationState.serializer, queryConfig, xas) private lazy val projectStore = ScopedStateStore(Projects.entityType, ProjectState.serializer, queryConfig, xas) diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/MultiResolutionSuite.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/MultiResolutionSuite.scala index 2983d102fc..bcb45bfccd 100644 --- 
a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/MultiResolutionSuite.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/MultiResolutionSuite.scala @@ -1,7 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.sdk.resolvers import cats.effect.IO -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.nxv import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder import ch.epfl.bluebrain.nexus.delta.sdk.generators.{ResolverResolutionGen, ResourceGen, SchemaGen} diff --git a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/bio/IOFixedClock.scala b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/bio/IOFixedClock.scala index 9a036851a3..1c06e195fd 100644 --- a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/bio/IOFixedClock.scala +++ b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/bio/IOFixedClock.scala @@ -13,12 +13,13 @@ trait IOFixedClock { override def monotonic(unit: TimeUnit): UIO[Long] = UIO.pure(instant.toEpochMilli) } + private val realClock: Clock[IO] = Clock.create + implicit def bioClock: Clock[UIO] = bioClock(Instant.EPOCH) def ceClock(instant: Instant): Clock[IO] = new Clock[IO] { - override def realTime(unit: TimeUnit): IO[Long] = IO.pure(instant.toEpochMilli) - - override def monotonic(unit: TimeUnit): IO[Long] = IO.pure(instant.toEpochMilli) + override def realTime(unit: TimeUnit): IO[Long] = IO.pure(instant.toEpochMilli) + override def monotonic(unit: TimeUnit): IO[Long] = realClock.monotonic(unit) } implicit def ceClock: Clock[IO] = ceClock(Instant.EPOCH) diff --git a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/bio/IOFromMap.scala b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/bio/IOFromMap.scala deleted file mode 100644 index 51806fa34f..0000000000 --- a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/bio/IOFromMap.scala +++ /dev/null @@ 
-1,27 +0,0 @@ -package ch.epfl.bluebrain.nexus.testkit.bio - -import monix.bio.{IO, UIO} - -trait IOFromMap { - - /** - * Convert a map to an function returning an IO - * - * @param values - * (key/value) giving the expected result for the given parameter - */ - final def ioFromMap[A, B](values: (A, B)*): A => UIO[Option[B]] = - (a: A) => IO.pure(values.toMap.get(a)) - - /** - * Convert a map to an function returning an IO - * - * @param map - * the map giving the expected result for the given parameter - * @param ifAbsent - * which error to return if the parameter can't be found - */ - final def ioFromMap[A, B, C](map: Map[A, B], ifAbsent: A => C): A => IO[C, B] = - (a: A) => IO.fromOption(map.get(a), ifAbsent(a)) - -} diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/HttpClient.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/HttpClient.scala index 5b1dbf48ad..36b486ca68 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/HttpClient.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/HttpClient.scala @@ -60,6 +60,11 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit )(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = requestAssert(PUT, url, Some(body), identity, extraHeaders)(assertResponse) + def putAndReturn[A](url: String, body: Json, identity: Identity, extraHeaders: Seq[HttpHeader] = jsonHeaders)( + assertResponse: (A, HttpResponse) => (A, Assertion) + )(implicit um: FromEntityUnmarshaller[A]): IO[A] = + requestAssertAndReturn(PUT, url, Some(body), identity, extraHeaders)(assertResponse).map(_._1) + def putAttachmentFromPath[A]( url: String, path: Path, @@ -139,25 +144,25 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit )(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = requestAssert(DELETE, url, None, identity, extraHeaders)(assertResponse) - def requestAssert[A]( + def requestAssertAndReturn[A]( method: HttpMethod, url: String, body: Option[Json], 
identity: Identity, extraHeaders: Seq[HttpHeader] = jsonHeaders - )(assertResponse: (A, HttpResponse) => Assertion)(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = { + )(assertResponse: (A, HttpResponse) => (A, Assertion))(implicit um: FromEntityUnmarshaller[A]): IO[(A, Assertion)] = { def buildClue(a: A, response: HttpResponse) = s""" - |Endpoint: ${method.value} $url - |Identity: $identity - |Token: ${Option(tokensMap.get(identity)).map(_.credentials.token()).getOrElse("None")} - |Status code: ${response.status} - |Body: ${body.getOrElse("None")} - |Response: - |$a - |""".stripMargin - - requestJson( + |Endpoint: ${method.value} $url + |Identity: $identity + |Token: ${Option(tokensMap.get(identity)).map(_.credentials.token()).getOrElse("None")} + |Status code: ${response.status} + |Body: ${body.getOrElse("None")} + |Response: + |$a + |""".stripMargin + + requestJson[A, (A, Assertion)]( method, url, body, @@ -167,6 +172,17 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit ) } + def requestAssert[A]( + method: HttpMethod, + url: String, + body: Option[Json], + identity: Identity, + extraHeaders: Seq[HttpHeader] = jsonHeaders + )(assertResponse: (A, HttpResponse) => Assertion)(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = + requestAssertAndReturn[A](method, url, body, identity, extraHeaders) { (a, resp) => + (a, assertResponse(a, resp)) + }.map(_._2) + def sparqlQuery[A](url: String, query: String, identity: Identity, extraHeaders: Seq[HttpHeader] = Nil)( assertResponse: (A, HttpResponse) => Assertion )(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = { diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/Optics.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/Optics.scala index 1763ebb0d3..ae2154eead 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/Optics.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/Optics.scala @@ -60,7 +60,8 @@ object Optics extends Optics { val 
`@type` = root.`@type`.string val _uuid = root._uuid.string - val _total = root._total.long + val _total = root._total.long + val _filename = root._filename.string val hits = root.hits.hits val totalHits = root.hits.total.value.int diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CopyFileSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CopyFileSpec.scala new file mode 100644 index 0000000000..905608f3d6 --- /dev/null +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CopyFileSpec.scala @@ -0,0 +1,50 @@ +package ch.epfl.bluebrain.nexus.tests.kg + +import akka.http.scaladsl.model._ +import akka.util.ByteString +import cats.effect.IO +import cats.implicits.catsSyntaxFlatMapOps +import ch.epfl.bluebrain.nexus.delta.kernel.utils.UrlUtils +import ch.epfl.bluebrain.nexus.tests.HttpClient._ +import ch.epfl.bluebrain.nexus.tests.Identity.storages.Coyote +import ch.epfl.bluebrain.nexus.tests.Optics +import io.circe.Json +import io.circe.syntax.KeyOps +import org.scalatest.Assertion + +trait CopyFileSpec { self: StorageSpec => + + "Copying a json file to a different organization" should { + + def givenAProjectWithStorage(test: String => IO[Assertion]): IO[Assertion] = { + val (proj, org) = (genId(), genId()) + val projRef = s"$org/$proj" + createProjects(Coyote, org, proj) >> + createStorages(projRef) >> + test(projRef) + } + + "succeed" in { + givenAProjectWithStorage { destProjRef => + val sourceFileId = "attachment.json" + val destFileId = "attachment2.json" + val destFilename = genId() + + val source = Json.obj("projectRef" := self.projectRef, "fileId" := sourceFileId) + val payload = Json.obj("destinationFilename" := destFilename, "source" := source) + val uri = s"/files/$destProjRef/$destFileId?storage=nxv:$storageId" + + for { + json <- deltaClient.putAndReturn[Json](uri, payload, Coyote) { (json, response) => + (json, expectCreated(json, response)) + } + returnedId = Optics.`@id`.getOption(json).getOrElse(fail("could not 
find @id of created resource")) + assertion <- + deltaClient.get[ByteString](s"/files/$destProjRef/${UrlUtils.encode(returnedId)}", Coyote, acceptAll) { + expectDownload(destFilename, ContentTypes.`application/json`, updatedJsonFileContent) + } + } yield assertion + } + } + } +} diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/DiskStorageSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/DiskStorageSpec.scala index bdc4007d7a..f3f56d01e1 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/DiskStorageSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/DiskStorageSpec.scala @@ -8,7 +8,7 @@ import ch.epfl.bluebrain.nexus.tests.iam.types.Permission import io.circe.Json import org.scalatest.Assertion -class DiskStorageSpec extends StorageSpec { +class DiskStorageSpec extends StorageSpec with CopyFileSpec { override def storageName: String = "disk" @@ -32,7 +32,7 @@ class DiskStorageSpec extends StorageSpec { ): _* ) - override def createStorages: IO[Assertion] = { + override def createStorages(projectRef: String): IO[Assertion] = { val payload = jsonContentOf("/kg/storages/disk.json") val payload2 = jsonContentOf("/kg/storages/disk-perms.json") diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/RemoteStorageSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/RemoteStorageSpec.scala index 4bf6b3ab60..ad93d53f4f 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/RemoteStorageSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/RemoteStorageSpec.scala @@ -17,7 +17,7 @@ import org.scalatest.Assertion import scala.annotation.nowarn import scala.sys.process._ -class RemoteStorageSpec extends StorageSpec { +class RemoteStorageSpec extends StorageSpec with CopyFileSpec { override def storageName: String = "external" @@ -60,7 +60,7 @@ class RemoteStorageSpec extends StorageSpec { ): _* ) - override def createStorages: IO[Assertion] = { + override 
def createStorages(projectRef: String): IO[Assertion] = { val payload = jsonContentOf( "/kg/storages/remote-disk.json", "endpoint" -> externalEndpoint, diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/S3StorageSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/S3StorageSpec.scala index cd864ea7f6..8d76a09be3 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/S3StorageSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/S3StorageSpec.scala @@ -82,7 +82,7 @@ class S3StorageSpec extends StorageSpec { ): _* ) - override def createStorages: IO[Assertion] = { + override def createStorages(projectRef: String): IO[Assertion] = { val payload = jsonContentOf( "/kg/storages/s3.json", "storageId" -> s"https://bluebrain.github.io/nexus/vocabulary/$storageId", diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/StorageSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/StorageSpec.scala index 023a1e1fa3..b95175c0a1 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/StorageSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/StorageSpec.scala @@ -39,7 +39,7 @@ abstract class StorageSpec extends BaseIntegrationSpec { def locationPrefix: Option[String] - def createStorages: IO[Assertion] + def createStorages(projectRef: String): IO[Assertion] protected def fileSelf(project: String, id: String): String = { val uri = Uri(s"${config.deltaUri}/files/$project") @@ -48,6 +48,9 @@ abstract class StorageSpec extends BaseIntegrationSpec { private[tests] val fileSelfPrefix = fileSelf(projectRef, attachmentPrefix) + val jsonFileContent = """{ "initial": ["is", "a", "test", "file"] }""" + val updatedJsonFileContent = """{ "updated": ["is", "a", "test", "file"] }""" + override def beforeAll(): Unit = { super.beforeAll() createProjects(Coyote, orgId, projId).accepted @@ -55,7 +58,7 @@ abstract class StorageSpec extends BaseIntegrationSpec { "Creating a storage" 
should { s"succeed for a $storageName storage" in { - createStorages + createStorages(projectRef) } "wait for storages to be indexed" in { @@ -91,9 +94,6 @@ abstract class StorageSpec extends BaseIntegrationSpec { "A json file" should { - val jsonFileContent = """{ "initial": ["is", "a", "test", "file"] }""" - val updatedJsonFileContent = """{ "updated": ["is", "a", "test", "file"] }""" - "be uploaded" in { deltaClient.uploadFile[Json]( s"/files/$projectRef/attachment.json?storage=nxv:$storageId", @@ -424,7 +424,7 @@ abstract class StorageSpec extends BaseIntegrationSpec { s"=?UTF-8?B?$encodedFilename?=" } - private def expectDownload( + protected def expectDownload( expectedFilename: String, expectedContentType: ContentType, expectedContent: String,