Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

👷 ci(tofhir-server): Deploy an onFhir test container #182

Merged
merged 5 commits into from
May 10, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 6 additions & 5 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@
<postgres.version>42.7.0</postgres.version>
<h2db.version>2.2.224</h2db.version>
<cron4j.version>2.2.5</cron4j.version>
<testcontainers.kafka>1.19.3</testcontainers.kafka>
<testcontainers>1.19.3</testcontainers>
<akka.version>2.8.5</akka.version>
<akka-http.version>10.5.3</akka-http.version>
<reflections.version>0.10.2</reflections.version>
Expand Down Expand Up @@ -488,12 +488,13 @@
<scope>test</scope>
</dependency>

<!-- Test containers to initialize Kafka during tests -->
<!-- Test containers to initialize Docker containers during tests -->
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>kafka</artifactId>
<version>${testcontainers.kafka}</version>
<scope>test</scope>
<artifactId>testcontainers-bom</artifactId>
<version>${testcontainers}</version>
<type>pom</type>
<scope>import</scope>
</dependency>

<!-- Reflections -->
Expand Down
11 changes: 11 additions & 0 deletions tofhir-engine/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -263,6 +263,17 @@
<artifactId>kafka</artifactId>
<scope>test</scope>
</dependency>
<!-- onFhir test container-->
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>testcontainers</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
<!-- Reflection -->
<dependency>
<groupId>org.reflections</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,14 +5,14 @@
"jsonClass" : "FileSystemSourceSettings",
"name" : "patient-test-data",
"sourceUri" : "http://test-data",
"dataFolderPath" : "/test-data",
"dataFolderPath" : "./test-data",
"asStream" : false
},
"patientGender" : {
"jsonClass" : "FileSystemSourceSettings",
"name" : "patient-gender-test-data",
"sourceUri" : "http://test-data",
"dataFolderPath" : "/test-data-gender",
"dataFolderPath" : "./test-data-gender",
"asStream" : false
}
},
Expand Down
23 changes: 23 additions & 0 deletions tofhir-engine/src/test/scala/io/tofhir/ToFhirTestSpec.scala
Original file line number Diff line number Diff line change
@@ -1,13 +1,18 @@
package io.tofhir

import akka.actor.ActorSystem
import io.onfhir.client.OnFhirNetworkClient
import io.tofhir.engine.config.ToFhirConfig
import io.tofhir.engine.execution.RunningJobRegistry
import io.tofhir.engine.mapping._
import io.tofhir.engine.util.FileUtils
import org.apache.spark.sql.SparkSession
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inside, Inspectors, OptionValues}
import org.testcontainers.containers.GenericContainer
import org.testcontainers.containers.wait.strategy.Wait
import org.testcontainers.junit.jupiter.Container
import org.testcontainers.utility.DockerImageName

import java.io.FileWriter
import java.net.URI
Expand All @@ -29,6 +34,24 @@ trait ToFhirTestSpec extends Matchers with OptionValues with Inside with Inspect

implicit val actorSystem: ActorSystem = ActorSystem("toFhirEngineTest")

// Instance of OnFhirNetworkClient initialized with onFhir container
var onFhirClient: OnFhirNetworkClient = initializeOnFhirClient();

/**
* Deploy an onFhir container for testing purpose
* */
/**
 * Deploys a disposable onFhir server container (image `srdc/onfhir:r4`) for tests
 * and returns a network client bound to its mapped FHIR endpoint.
 *
 * The container is never stopped explicitly; Testcontainers' Ryuk sidecar reaps it
 * when the JVM exits.
 *
 * @return an [[OnFhirNetworkClient]] pointed at the container's `/fhir` base URL
 */
def initializeOnFhirClient(): OnFhirNetworkClient = {
  // NOTE(review): the JUnit-Jupiter @Container annotation was removed on purpose —
  // it only manages *fields* of JUnit 5 test classes, so on a local variable inside
  // a ScalaTest trait it had no effect. Lifecycle is handled manually via start().
  val container: GenericContainer[Nothing] =
    new GenericContainer(DockerImageName.parse("srdc/onfhir:r4")).withExposedPorts(8081)
  container.addEnv("DB_EMBEDDED", "true") // use onFhir's embedded database; no external DB needed in tests
  container.addEnv("SERVER_PORT", "8081")
  container.addEnv("SERVER_BASE_URI", "fhir")
  // NOTE(review): this advertises the fixed in-container port (8081), not the port
  // Testcontainers maps on the host — the mapped port is unknown before start().
  // Presumably onFhir only uses FHIR_ROOT_URL for links embedded in resources; confirm.
  container.addEnv("FHIR_ROOT_URL", s"http://${container.getHost}:8081/fhir")
  // Block until the server answers HTTP 200 on /fhir so tests never race server startup.
  container.waitingFor(Wait.forHttp("/fhir").forStatusCode(200))
  container.start()
  // The client must use the host-mapped port, which is only known after start().
  OnFhirNetworkClient.apply(s"http://${container.getHost}:${container.getFirstMappedPort}/fhir")
}

/**
* Copies the content of a resource file to given location in the context path.
* @param path The path to the resource file
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,12 @@ package io.tofhir.integrationtest
import akka.http.scaladsl.model.StatusCodes
import io.onfhir.api.client.FhirBatchTransactionRequestBuilder
import io.onfhir.api.util.FHIRUtil
import io.onfhir.client.OnFhirNetworkClient
import io.onfhir.path.FhirPathUtilFunctionsFactory
import io.tofhir.common.model.Json4sSupport.formats
import io.tofhir.ToFhirTestSpec
import io.tofhir.common.model.Json4sSupport.formats
import io.tofhir.engine.Execution.actorSystem.dispatcher
import io.tofhir.engine.mapping.FhirMappingJobManager
import io.tofhir.engine.model._
import io.tofhir.engine.util.FhirMappingJobFormatter.EnvironmentVariable
import io.tofhir.engine.util.FhirMappingUtility
import org.apache.commons.io
import org.apache.kafka.clients.admin.{AdminClient, AdminClientConfig, NewTopic}
Expand All @@ -30,14 +28,11 @@ import java.util.concurrent.TimeUnit
import java.util.{Collections, Properties, UUID}
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{Await, Future}
import scala.util.Try

class KafkaSourceIntegrationTest extends AnyFlatSpec with ToFhirTestSpec with BeforeAndAfterAll {

override protected def afterAll(): Unit = {
if(fhirServerIsAvailable) {
deleteResources()
}
deleteResources()
if (adminClient != null) adminClient.close()
if (producer != null) producer.close()
if (consumer != null) consumer.close()
Expand Down Expand Up @@ -84,7 +79,7 @@ class KafkaSourceIntegrationTest extends AnyFlatSpec with ToFhirTestSpec with Be
val streamingSourceSettings: Map[String, KafkaSourceSettings] =
Map("source" -> KafkaSourceSettings("kafka-source", "https://aiccelerate.eu/data-integration-suite/kafka-data", s"PLAINTEXT://localhost:$kafkaPort"))

val fhirSinkSettings: FhirRepositorySinkSettings = FhirRepositorySinkSettings(fhirRepoUrl = sys.env.getOrElse(EnvironmentVariable.FHIR_REPO_URL.toString, "http://localhost:8081/fhir"))
val fhirSinkSettings: FhirRepositorySinkSettings = FhirRepositorySinkSettings(fhirRepoUrl = onFhirClient.getBaseUrl())

val patientMappingTask: FhirMappingTask = FhirMappingTask(
mappingRef = "https://aiccelerate.eu/fhir/mappings/patient-mapping",
Expand All @@ -107,11 +102,6 @@ class KafkaSourceIntegrationTest extends AnyFlatSpec with ToFhirTestSpec with Be
dataProcessingSettings = DataProcessingSettings()
)

val onFhirClient: OnFhirNetworkClient = OnFhirNetworkClient.apply(fhirSinkSettings.fhirRepoUrl)
val fhirServerIsAvailable: Boolean =
Try(Await.result(onFhirClient.search("Patient").execute(), FiniteDuration(5, TimeUnit.SECONDS)).httpStatus == StatusCodes.OK)
.getOrElse(false)

val fhirMappingJobManager = new FhirMappingJobManager(mappingRepository, contextLoader, schemaRepository, Map(FhirPathUtilFunctionsFactory.defaultPrefix -> FhirPathUtilFunctionsFactory), sparkSession)

it should "check the test container working" in {
Expand Down Expand Up @@ -196,15 +186,14 @@ class KafkaSourceIntegrationTest extends AnyFlatSpec with ToFhirTestSpec with Be
}

it should "consume patients, observations and family member history data and map and write to the fhir repository" in {
assume(fhirServerIsAvailable)
val execution: FhirMappingJobExecution = FhirMappingJobExecution(job = fhirMappingJob, mappingTasks = Seq(patientMappingTask, otherObservationMappingTask, familyMemberHistoryMappingTask))
val streamingQueryFutures: Map[String, Future[StreamingQuery]] = fhirMappingJobManager.startMappingJobStream(mappingJobExecution =
execution,
sourceSettings = streamingSourceSettings,
sinkSettings = fhirSinkSettings
)
streamingQueryFutures.foreach(sq => {
val streamingQuery: StreamingQuery = Await.result(sq._2, FiniteDuration.apply(5, TimeUnit.SECONDS)) // First wait for the StreamingQuery to become available
val streamingQuery: StreamingQuery = Await.result(sq._2, FiniteDuration.apply(60, TimeUnit.SECONDS)) // First wait for the StreamingQuery to become available
streamingQuery.awaitTermination(20000L) // Wait for 20 seconds to consume and write to the fhir repo and terminate
streamingQuery.stop()
io.FileUtils.deleteDirectory(new File(execution.getCheckpointDirectory(sq._1))) // Clear checkpoint directory to prevent conflicts with other tests
Expand Down Expand Up @@ -248,8 +237,6 @@ class KafkaSourceIntegrationTest extends AnyFlatSpec with ToFhirTestSpec with Be
foo()
})
consumer.unsubscribe()

assume(fhirServerIsAvailable)
// modify familyMemberHistoryMappingTask to listen to familyMembersCorrupted topic
val mappingTask = familyMemberHistoryMappingTask.copy(sourceContext = Map("source" -> KafkaSource(topicName = "familyMembersCorrupted", groupId = "tofhir", startingOffsets = "earliest")))
val execution: FhirMappingJobExecution = FhirMappingJobExecution(job = fhirMappingJob, mappingTasks = Seq(mappingTask))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,11 @@ import akka.http.scaladsl.model.StatusCodes
import io.onfhir.api.Resource
import io.onfhir.api.client.FhirBatchTransactionRequestBuilder
import io.onfhir.api.util.FHIRUtil
import io.onfhir.client.OnFhirNetworkClient
import io.onfhir.path.{FhirPathIdentityServiceFunctionsFactory, FhirPathUtilFunctionsFactory}
import io.onfhir.util.JsonFormatter._
import io.tofhir.ToFhirTestSpec
import io.tofhir.engine.mapping.{FhirMappingJobManager, MappingContextLoader}
import io.tofhir.engine.model._
import io.tofhir.engine.util.FhirMappingJobFormatter.EnvironmentVariable
import io.tofhir.engine.util.{FhirMappingJobFormatter, FhirMappingUtility, FileUtils}
import org.scalatest.flatspec.AsyncFlatSpec
import org.scalatest.{Assertion, BeforeAndAfterAll}
Expand All @@ -20,14 +18,13 @@ import java.nio.file.Paths
import java.util.concurrent.TimeUnit
import scala.concurrent.duration.{Duration, FiniteDuration}
import scala.concurrent.{Await, ExecutionContext}
import scala.util.Try

class FhirMappingJobManagerTest extends AsyncFlatSpec with BeforeAndAfterAll with ToFhirTestSpec {

val dataSourceSettings: Map[String, DataSourceSettings] =
Map("source" ->
FileSystemSourceSettings("test-source", "https://aiccelerate.eu/data-integration-suite/test-data", Paths.get(getClass.getResource("/test-data").toURI).normalize().toAbsolutePath.toString))
val fhirSinkSettings: FhirRepositorySinkSettings = FhirRepositorySinkSettings(fhirRepoUrl = sys.env.getOrElse(EnvironmentVariable.FHIR_REPO_URL.toString, "http://localhost:8081/fhir"))
val fhirSinkSettings: FhirRepositorySinkSettings = FhirRepositorySinkSettings(fhirRepoUrl = onFhirClient.getBaseUrl())

val patientMappingTask: FhirMappingTask = FhirMappingTask(
mappingRef = "https://aiccelerate.eu/fhir/mappings/patient-mapping",
Expand Down Expand Up @@ -69,11 +66,6 @@ class FhirMappingJobManagerTest extends AsyncFlatSpec with BeforeAndAfterAll wit
sourceContext = Map("source" -> FileSystemSource(path = "patients.tsv"))
)

val onFhirClient: OnFhirNetworkClient = OnFhirNetworkClient.apply(fhirSinkSettings.fhirRepoUrl)
val fhirServerIsAvailable: Boolean =
Try(Await.result(onFhirClient.search("Patient").execute(), FiniteDuration(5, TimeUnit.SECONDS)).httpStatus == StatusCodes.OK)
.getOrElse(false)

val testMappingJobFilePath: String = getClass.getResource("/test-mappingjob.json").toURI.getPath
val testMappingJobWithIdentityServiceFilePath: String = getClass.getResource("/test-mappingjob-using-services.json").toURI.getPath

Expand All @@ -89,9 +81,7 @@ class FhirMappingJobManagerTest extends AsyncFlatSpec with BeforeAndAfterAll wit
dataProcessingSettings = DataProcessingSettings())

override protected def afterAll(): Unit = {
if (fhirServerIsAvailable) {
deleteResources()
}
deleteResources()
// delete context path
org.apache.commons.io.FileUtils.deleteDirectory(FileUtils.getPath("").toFile)
super.afterAll()
Expand Down Expand Up @@ -171,11 +161,10 @@ class FhirMappingJobManagerTest extends AsyncFlatSpec with BeforeAndAfterAll wit
}

it should "execute a mapping job with two data sources" in {
assume(fhirServerIsAvailable)
val mappingJob = FhirMappingJobFormatter.readMappingJobFromFile(getClass.getResource("/patient-mapping-job-with-two-sources.json").toURI.getPath)

val fhirMappingJobManager = new FhirMappingJobManager(mappingRepository, contextLoader, schemaRepository, Map.empty, sparkSession)
fhirMappingJobManager.executeMappingJob(mappingJobExecution = FhirMappingJobExecution(mappingTasks = mappingJob.mappings, job = mappingJob), sourceSettings = mappingJob.sourceSettings, sinkSettings = mappingJob.sinkSettings) flatMap { _ =>
fhirMappingJobManager.executeMappingJob(mappingJobExecution = FhirMappingJobExecution(mappingTasks = mappingJob.mappings, job = mappingJob), sourceSettings = mappingJob.sourceSettings, sinkSettings = mappingJob.sinkSettings.asInstanceOf[FhirRepositorySinkSettings].copy(fhirRepoUrl = onFhirClient.getBaseUrl())) flatMap { _ =>
onFhirClient.read("Patient", "test-patient").executeAndReturnResource() flatMap { p1Resource =>
(p1Resource \ "id").extract[String] shouldBe "test-patient"
(p1Resource \ "gender").extract[String] shouldBe "male"
Expand All @@ -184,8 +173,6 @@ class FhirMappingJobManagerTest extends AsyncFlatSpec with BeforeAndAfterAll wit
}

it should "execute the mappings with FHIR Path patch" in {
assume(fhirServerIsAvailable)

val fhirMappingJobManager = new FhirMappingJobManager(mappingRepository, contextLoader, schemaRepository, Map.empty, sparkSession)
fhirMappingJobManager.executeMappingJob(mappingJobExecution = FhirMappingJobExecution(mappingTasks = Seq(patientMappingTask), job = fhirMappingJob), sourceSettings = dataSourceSettings, sinkSettings = fhirSinkSettings).flatMap(_ =>
fhirMappingJobManager.executeMappingJob(mappingJobExecution = FhirMappingJobExecution(mappingTasks = Seq(patientExtraMappingWithPatch), job = fhirMappingJob) , sourceSettings = dataSourceSettings, sinkSettings = fhirSinkSettings) flatMap { response =>
Expand All @@ -203,7 +190,6 @@ class FhirMappingJobManagerTest extends AsyncFlatSpec with BeforeAndAfterAll wit
}

it should "execute the mappings with conditional FHIR Path patch" in {
assume(fhirServerIsAvailable)
val fhirMappingJobManager = new FhirMappingJobManager(mappingRepository, contextLoader, schemaRepository, Map.empty, sparkSession)
fhirMappingJobManager.executeMappingJob(mappingJobExecution = FhirMappingJobExecution(mappingTasks = Seq(patientMappingTask), job = fhirMappingJob), sourceSettings = dataSourceSettings, sinkSettings = fhirSinkSettings).flatMap(_ =>
fhirMappingJobManager.executeMappingJob(mappingJobExecution = FhirMappingJobExecution(mappingTasks = Seq(patientExtraMappingWithConditionalPatch), job = fhirMappingJob), sourceSettings = dataSourceSettings, sinkSettings = fhirSinkSettings) flatMap { response =>
Expand Down Expand Up @@ -290,8 +276,6 @@ class FhirMappingJobManagerTest extends AsyncFlatSpec with BeforeAndAfterAll wit


it should "execute the mapping job with multiple mapping tasks and write the results into a FHIR repository" in {
assume(fhirServerIsAvailable)

val fhirMappingJobManager = new FhirMappingJobManager(mappingRepository, contextLoader, schemaRepository, Map(FhirPathUtilFunctionsFactory.defaultPrefix -> FhirPathUtilFunctionsFactory), sparkSession)
fhirMappingJobManager.executeMappingJob(mappingJobExecution = FhirMappingJobExecution(mappingTasks = Seq(patientMappingTask, otherObservationMappingTask), job = fhirMappingJob), sourceSettings = dataSourceSettings, sinkSettings = fhirSinkSettings) flatMap { response =>
onFhirClient.read("Patient", FhirMappingUtility.getHashedId("Patient", "p8")).executeAndReturnResource() flatMap { p1Resource =>
Expand All @@ -313,8 +297,6 @@ class FhirMappingJobManagerTest extends AsyncFlatSpec with BeforeAndAfterAll wit
}

it should "continue execute the mapping job when encounter without an error" in {
assume(fhirServerIsAvailable)

val fhirMappingJobManager = new FhirMappingJobManager(mappingRepository, contextLoader, schemaRepository, Map(FhirPathUtilFunctionsFactory.defaultPrefix -> FhirPathUtilFunctionsFactory), sparkSession)

val future = fhirMappingJobManager.executeMappingJob(mappingJobExecution = FhirMappingJobExecution(
Expand Down Expand Up @@ -380,7 +362,6 @@ class FhirMappingJobManagerTest extends AsyncFlatSpec with BeforeAndAfterAll wit
}

it should "execute the FhirMappingJob using an identity service" in {
assume(fhirServerIsAvailable)
val lMappingJob = FhirMappingJobFormatter.readMappingJobFromFile(testMappingJobWithIdentityServiceFilePath)

val terminologyServiceFolderPath = Paths.get(getClass.getResource("/terminology-service").toURI).normalize().toAbsolutePath.toString
Expand All @@ -394,7 +375,7 @@ class FhirMappingJobManagerTest extends AsyncFlatSpec with BeforeAndAfterAll wit
sourceSettings = dataSourceSettings,
sinkSettings = fhirSinkSettings,
terminologyServiceSettings = Some(terminologyServiceSettings),
identityServiceSettings = lMappingJob.getIdentityServiceSettings()) map { res =>
identityServiceSettings = lMappingJob.copy(sinkSettings = lMappingJob.sinkSettings.asInstanceOf[FhirRepositorySinkSettings].copy(fhirRepoUrl = onFhirClient.getBaseUrl())).getIdentityServiceSettings()) map { res =>
res shouldBe a[Unit]
}
}
Expand Down
Loading
Loading