diff --git a/.travis.yml b/.travis.yml
index c245e0264732..14d16e5e363b 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -46,7 +46,7 @@ addons:
# Add various options to make 'mvn install' fast and skip javascript compile (-Ddruid.console.skip=true) since it is not
# needed. Depending on network speeds, "mvn -q install" may take longer than the default 10 minute timeout to print any
# output. To compensate, use travis_wait to extend the timeout.
-install: ./check_test_suite.py && travis_terminate 0 || echo 'Running Maven install...' && MAVEN_OPTS='-Xmx3000m' travis_wait 15 ${MVN} clean install -q -ff -pl '!distribution,!:it-tools,!:it-image' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -T1C && ${MVN} install -q -ff -pl 'distribution' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS}
+install: ./check_test_suite.py && travis_terminate 0 || echo 'Running Maven install...' && MAVEN_OPTS='-Xmx3000m' travis_wait 15 ${MVN} clean install -q -ff -pl '!distribution,!:druid-it-tools,!:druid-it-image,!:druid-it-cases' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -T1C && ${MVN} install -q -ff -pl 'distribution' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS}
# There are 3 stages of tests
# 1. Tests - phase 1
@@ -73,19 +73,6 @@ jobs:
stage: Tests - phase 1
script: ${MVN} animal-sniffer:check --fail-at-end
- # Experimental run of the revised ITs. Done early to debug issues
- # Disabled for now. Integrating this into the build will come in a later PR.
- - name: "experimental docker tests"
- stage: Tests - phase 1
- # Uses the install defined above. Then, builds the test tools and docker image,
- # and run the various IT tests. If tests fail, echos log lines of any of
- # the Druid services that did not exit normally.
- # Run though install to ensure the test tools are installed, and the docker
- # image is built. The tests only need verify.
- script: ${MVN} install -P dist,test-image,docker-tests,IT-HighAvailability -rf :distribution ${MAVEN_SKIP} -DskipUTs=true
- #after_failure:
- # - docker-tests/check-results.sh
-
- name: "checkstyle"
script: ${MVN} checkstyle:checkstyle --fail-at-end
@@ -470,9 +457,9 @@ jobs:
docker exec -it druid-$v sh -c 'dmesg | tail -3' ;
done
- - <<: *integration_batch_index
- name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer"
- env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
+ #- <<: *integration_batch_index
+ # name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer"
+ # env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- &integration_input_format
name: "(Compile=openjdk8, Run=openjdk8) input format integration test"
@@ -679,16 +666,33 @@ jobs:
name: "(Compile=openjdk8, Run=openjdk8) other integration tests with Indexer"
env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- - <<: *integration_tests
- name: "(Compile=openjdk8, Run=openjdk8) leadership and high availability integration tests"
- jdk: openjdk8
- env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
+ #- <<: *integration_tests
+ # name: "(Compile=openjdk8, Run=openjdk8) leadership and high availability integration tests"
+ # jdk: openjdk8
+ # env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
- <<: *integration_query
name: "(Compile=openjdk8, Run=openjdk8) query integration test (mariaDB)"
jdk: openjdk8
env: TESTNG_GROUPS='-Dgroups=query' USE_INDEXER='middleManager' MYSQL_DRIVER_CLASSNAME='org.mariadb.jdbc.Driver' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
+ # Revised ITs.
+ - &integration_tests_ex
+ name: "(Compile=openjdk8, Run=openjdk8) leadership and high availability integration tests (new)"
+ stage: Tests - phase 2
+ jdk: openjdk8
+ services: *integration_test_services
+ env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
+      # Uses the install defined above. Then, builds the test tools and Docker image,
+      # and runs one IT. If the tests fail, echoes the log lines of any Druid service
+      # that did not exit normally.
+ script: ./it.sh travis HighAvailability
+
+ - <<: *integration_tests_ex
+ name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer (new)"
+ env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
+ script: ./it.sh travis BatchIndex
+
# END - Integration tests for Compile with Java 8 and Run with Java 8
# START - Integration tests for Compile with Java 8 and Run with Java 11
@@ -769,21 +773,6 @@ jobs:
# END - Integration tests for Compile with Java 8 and Run with Java 11
- # BEGIN - Revised integration tests
-
- # Experimental build of the revised integration test Docker image.
- # Actual tests will come later.
- - name: "experimental docker tests"
- stage: Tests - phase 2
- # Uses the install defined above. Then, builds the test tools and docker image,
- # and run the various IT tests. If tests fail, echos log lines of any of
- # the Druid services that did not exit normally.
- # Run though install to ensure the test tools are installed, and the docker
- # image is built. The tests only need verify.
- script: ${MVN} install -P dist,test-image -rf :distribution ${MAVEN_SKIP} -DskipUTs=true
-
- # END - Revised integration tests
-
- &integration_batch_index_k8s
name: "(Compile=openjdk8, Run=openjdk8, Cluster Build On K8s) ITNestedQueryPushDownTest integration test"
stage: Tests - phase 2
diff --git a/core/src/main/java/org/apache/druid/guice/PolyBind.java b/core/src/main/java/org/apache/druid/guice/PolyBind.java
index 592d578c4b75..19931a6b63e8 100644
--- a/core/src/main/java/org/apache/druid/guice/PolyBind.java
+++ b/core/src/main/java/org/apache/druid/guice/PolyBind.java
@@ -43,8 +43,8 @@
* Provides the ability to create "polymorphic" bindings where the polymorphism is actually just making a decision
* based on a value in Properties.
*
- * The workflow is that you first create a choice by calling {@link #createChoice()}. Then you create options using
- * the binder returned by the {@link #optionBinder()} method. Multiple different modules can call
+ * The workflow is that you first create a choice by calling {@code createChoice()}. Then you create options using
+ * the binder returned by the {@code optionBinder()} method. Multiple different modules can call
* {@code optionBinder()} and all options will be reflected at injection time as long as equivalent interface
* {@code Key} objects are passed into the various methods.
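+ *
+ * A sketch of the pattern (the storage names here are illustrative):
+ * <pre>{@code
+ * PolyBind.createChoice(binder, "druid.storage.type", Key.get(Storage.class), null);
+ * PolyBind.optionBinder(binder, Key.get(Storage.class))
+ *         .addBinding("local")
+ *         .to(LocalStorage.class);
+ * }</pre>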
*/
diff --git a/core/src/test/java/org/apache/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java b/core/src/test/java/org/apache/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java
index dd9c384e91a5..38fb843a34c0 100644
--- a/core/src/test/java/org/apache/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java
+++ b/core/src/test/java/org/apache/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java
@@ -41,6 +41,7 @@
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.ClassRule;
+import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
@@ -249,6 +250,7 @@ public void testWithCacheAndFetch() throws IOException
}
@Test
+ @Ignore("See issue #12638")
public void testWithLargeCacheAndSmallFetch() throws IOException
{
final TestPrefetchableTextFilesFirehoseFactory factory =
@@ -336,6 +338,7 @@ public void testTimeout() throws IOException
}
@Test
+ @Ignore("See issue #12638")
public void testReconnectWithCacheAndPrefetch() throws IOException
{
final TestPrefetchableTextFilesFirehoseFactory factory =
diff --git a/core/src/test/java/org/apache/druid/guice/PolyBindTest.java b/core/src/test/java/org/apache/druid/guice/PolyBindTest.java
index d6a50d513960..372d428458d6 100644
--- a/core/src/test/java/org/apache/druid/guice/PolyBindTest.java
+++ b/core/src/test/java/org/apache/druid/guice/PolyBindTest.java
@@ -112,7 +112,7 @@ public void configure(Binder binder)
}
catch (Exception e) {
Assert.assertTrue(e instanceof ProvisionException);
- Assert.assertTrue(e.getMessage().contains("Unknown provider[c] of Key[type=org.apache.druid.guice.PolyBindTest$Gogo"));
+ Assert.assertTrue(e.getMessage().contains("Unknown provider [c] of Key[type=org.apache.druid.guice.PolyBindTest$Gogo"));
}
try {
Assert.assertEquals("B", injector.getInstance(Key.get(Gogo.class, Names.named("reverse"))).go());
@@ -120,9 +120,9 @@ public void configure(Binder binder)
}
catch (Exception e) {
Assert.assertTrue(e instanceof ProvisionException);
- Assert.assertTrue(e.getMessage().contains("Unknown provider[c] of Key[type=org.apache.druid.guice.PolyBindTest$Gogo"));
+ Assert.assertTrue(e.getMessage().contains("Unknown provider [c] of Key[type=org.apache.druid.guice.PolyBindTest$Gogo"));
}
-
+
// test default property value
Assert.assertEquals("B", injector.getInstance(GogoSally.class).go());
props.setProperty("sally", "a");
@@ -136,7 +136,7 @@ public void configure(Binder binder)
}
catch (Exception e) {
Assert.assertTrue(e instanceof ProvisionException);
- Assert.assertTrue(e.getMessage().contains("Unknown provider[c] of Key[type=org.apache.druid.guice.PolyBindTest$GogoSally"));
+ Assert.assertTrue(e.getMessage().contains("Unknown provider [c] of Key[type=org.apache.druid.guice.PolyBindTest$GogoSally"));
}
}
diff --git a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/report/MSQStagesReport.java b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/report/MSQStagesReport.java
index 3900f1540f6d..2b9251c5892c 100644
--- a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/report/MSQStagesReport.java
+++ b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/report/MSQStagesReport.java
@@ -22,7 +22,6 @@
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonValue;
import com.google.common.base.Preconditions;
import org.apache.druid.msq.kernel.QueryDefinition;
import org.apache.druid.msq.kernel.StageDefinition;
@@ -84,7 +83,7 @@ public static MSQStagesReport create(
return new MSQStagesReport(stages);
}
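+  // The stage list serializes under an explicit "stages" key.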
- @JsonValue
+ @JsonProperty("stages")
   public List<Stage> getStages()
{
return stages;
diff --git a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/report/MSQTaskReport.java b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/report/MSQTaskReport.java
index d8f59f6a1806..b96a65ae888c 100644
--- a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/report/MSQTaskReport.java
+++ b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/report/MSQTaskReport.java
@@ -55,6 +55,13 @@ public String getReportKey()
return REPORT_KEY;
}
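+  // Serializes an explicit "type" field into the written report.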
+ @JsonProperty("type")
+ private String getType()
+ {
+ return REPORT_KEY;
+ }
+
@Override
@JsonProperty
public Object getPayload()
diff --git a/integration-tests-ex/README.md b/integration-tests-ex/README.md
index 1e0a85d9c3e6..9c29ec101fe9 100644
--- a/integration-tests-ex/README.md
+++ b/integration-tests-ex/README.md
@@ -32,21 +32,26 @@ an explanation.
### Build Druid
+To keep the commands below concise, define a variable for the standard Maven settings:
+
+```bash
+export MAVEN_IGNORE="-P skip-static-checks,skip-tests -Dmaven.javadoc.skip=true"
+```
+
```bash
-mvn clean package -P dist,skip-static-checks,skip-tests -Dmaven.javadoc.skip=true -T1.0C
+mvn clean package -P dist $MAVEN_IGNORE -T1.0C
```
### Build the Test Image
```bash
cd $DRUID_DEV/integration-tests-ex/image
-mvn -P test-image install
+mvn install -P test-image $MAVEN_IGNORE
```
### Run an IT from the Command Line
```bash
-mvn install -P IT-<category> -pl :druid-it-cases
+mvn verify -P IT-<category> -pl :druid-it-cases $MAVEN_IGNORE
```
Where `<category>` is one of the test categories.
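+
+For example, to run the `HighAvailability` category (assuming the distribution
+and test image were built as above; the category name is illustrative):
+
+```bash
+mvn verify -P IT-HighAvailability -pl :druid-it-cases $MAVEN_IGNORE
+```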
@@ -56,7 +61,8 @@ Or
```bash
cd $DRUID_DEV/integration-tests-ex/cases
mvn verify -P skip-static-checks,docker-tests,IT- \
- -Dmaven.javadoc.skip=true -DskipUTs=true
+ -Dmaven.javadoc.skip=true -DskipUTs=true \
+ -pl :druid-it-cases
```
### Run an IT from the IDE
@@ -75,6 +81,7 @@ test as a JUnit test.
* [Goals](#Goals)
* [Quickstart](docs/quickstart.md)
+* [Create a new test](docs/guide.md)
* [Maven configuration](docs/maven.md)
* [Travis integration](docs/travis.md)
* [Docker image](docs/docker.md)
diff --git a/integration-tests-ex/cases/.gitignore b/integration-tests-ex/cases/.gitignore
new file mode 100644
index 000000000000..ae3c1726048c
--- /dev/null
+++ b/integration-tests-ex/cases/.gitignore
@@ -0,0 +1 @@
+/bin/
diff --git a/integration-tests-ex/cases/cluster.sh b/integration-tests-ex/cases/cluster.sh
index 8f4dcde123e8..0b19b478fcb8 100755
--- a/integration-tests-ex/cases/cluster.sh
+++ b/integration-tests-ex/cases/cluster.sh
@@ -26,67 +26,106 @@
export MODULE_DIR=$(cd $(dirname $0) && pwd)
-USAGE="Usage: $0 category [-h|help|up|down|status|compose-cmd]"
+function usage {
+  cat <<EOF
+Usage: $0 cmd [category]
+  cmd is one of: -h|help|up|down|status|compose-cmd
+  category is the name of the test category (cluster definition) to run
+EOF
+}
 
 if [ $# -eq 0 ]; then
-  echo "$USAGE" 1>&2
+  usage 1>&2
   exit 1
 fi
-# The untranslated category is used for the local name of the
-# shared folder.
-CATEGORY=$1
+CMD=$1
shift
-# DRUID_INTEGRATION_TEST_GROUP is used in
-# docker-compose files and here. Despite the name, it is the
-# name of the cluster configuration we want to run, not the
-# test category. Multiple categories an map to the same cluster
-# definition.
-
-# Map from category name to shared cluster definition name.
-# Add an entry here if you create a new category that shares
-# a definition.
-case $CATEGORY in
- "InputFormat")
- export DRUID_INTEGRATION_TEST_GROUP=BatchIndex
- ;;
- *)
- export DRUID_INTEGRATION_TEST_GROUP=$CATEGORY
- ;;
-esac
-
-CLUSTER_DIR=$MODULE_DIR/cluster/$DRUID_INTEGRATION_TEST_GROUP
-if [ ! -d $CLUSTER_DIR ]; then
- echo "Cluster directory $CLUSTER_DIR does not exist." 1>&2
- echo "$USAGE" 1>&2
- exit 1
-fi
+function category {
+ if [ $# -eq 0 ]; then
+ usage 1>&2
+ exit 1
+ fi
+ export CATEGORY=$1
+
+ # All commands need env vars
+ ENV_FILE=$MODULE_DIR/../image/target/env.sh
+ if [ ! -f $ENV_FILE ]; then
+ echo "Please build the Docker test image before testing" 1>&2
+ exit 1
+ fi
+
+ source $ENV_FILE
+ # The untranslated category is used for the local name of the
+ # shared folder.
+
+ # DRUID_INTEGRATION_TEST_GROUP is used in
+ # docker-compose files and here. Despite the name, it is the
+ # name of the cluster configuration we want to run, not the
+  # test category. Multiple categories can map to the same cluster
+ # definition.
+
+ # Map from category name to shared cluster definition name.
+ # Add an entry here if you create a new category that shares
+ # a definition.
+ case $CATEGORY in
+ "InputFormat")
+ export DRUID_INTEGRATION_TEST_GROUP=BatchIndex
+ ;;
+ *)
+ export DRUID_INTEGRATION_TEST_GROUP=$CATEGORY
+ ;;
+ esac
+
+ export CLUSTER_DIR=$MODULE_DIR/cluster/$DRUID_INTEGRATION_TEST_GROUP
+ if [ ! -d $CLUSTER_DIR ]; then
+ echo "Cluster directory $CLUSTER_DIR does not exist." 1>&2
+    usage 1>&2
+ exit 1
+ fi
+
+ export TARGET_DIR=$MODULE_DIR/target
+ export SHARED_DIR=$TARGET_DIR/$CATEGORY
+ export ENV_FILE="$TARGET_DIR/${CATEGORY}.env"
+}
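+
+# Example (illustrative): "./cluster.sh up BatchIndex" selects the BatchIndex
+# category, which resolves to the cluster definition in cluster/BatchIndex.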
-# 'up' command by default, else whatever is the argument
-CMD='up'
-if [ $# -ge 1 ]; then
- CMD=$1
-fi
+function build_override {
-# All commands need env vars
-ENV_FILE=$MODULE_DIR/../image/target/env.sh
-if [ ! -f $ENV_FILE ]; then
- echo "Please build the Docker test image before testing" 1>&2
- exit 1
-fi
+ mkdir -p target
+ rm -f "$ENV_FILE"
+ touch "$ENV_FILE"
-source $ENV_FILE
+ # User-local settings?
+ LOCAL_ENV="$HOME/druid-it/${CATEGORY}.env"
+ if [ -f "$LOCAL_ENV" ]; then
+ cat "$LOCAL_ENV" >> "$ENV_FILE"
+ fi
-export TARGET_DIR=$MODULE_DIR/target
-export SHARED_DIR=$TARGET_DIR/$CATEGORY
+ # Provided override file
+ if [ -n "$OVERRIDE_ENV" ]; then
+ if [ ! -f "$OVERRIDE_ENV" ]; then
+ echo "Environment override file (OVERRIDE_ENV) not found: $OVERRIDE_ENV" 1>&2
+ exit 1
+ fi
+ cat "$OVERRIDE_ENV" >> "$ENV_FILE"
+ fi
-# Used in docker-compose files
-export OVERRIDE_ENV=${OVERRIDE_ENV:-}
+ # Add all environment variables of the form druid_*
+ env | grep "^druid_" >> "$ENV_FILE"
-# Print environment for debugging
-#env
+ # Reuse the OVERRIDE_ENV variable to pass the full list to Docker compose
+ export OVERRIDE_ENV="$ENV_FILE"
+}
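+
+# Example (illustrative): OVERRIDE_ENV=/tmp/overrides.env ./cluster.sh up BatchIndex
+# appends /tmp/overrides.env, then any druid_* variables from the environment,
+# to target/BatchIndex.env, which docker-compose injects into each service.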
# Dump lots of information to debug Docker failures when run inside
# of a build environment where we can't inspect Docker directly.
@@ -104,46 +143,59 @@ function show_status {
echo "===================================="
}
+function build_shared_dir {
+ mkdir -p $SHARED_DIR
+ # Must start with an empty DB to keep MySQL happy
+ rm -rf $SHARED_DIR/db
+ mkdir -p $SHARED_DIR/logs
+ mkdir -p $SHARED_DIR/tasklogs
+ mkdir -p $SHARED_DIR/db
+ mkdir -p $SHARED_DIR/kafka
+ mkdir -p $SHARED_DIR/resources
+ cp $MODULE_DIR/assets/log4j2.xml $SHARED_DIR/resources
+ # Permissions in some build setups are screwed up. See above. The user
+ # which runs Docker does not have permission to write into the /shared
+ # directory. Force ownership to allow writing.
+ chmod -R a+rwx $SHARED_DIR
+}
+
+# Print environment for debugging
+#env
+
case $CMD in
- "-h" | "-?")
- echo "$USAGE"
+ "-h" )
+ usage
;;
- "help")
- echo "$USAGE"
+ "help" )
+ usage
docker-compose help
;;
- "up")
+ "up" )
+ category $*
echo "Starting cluster $DRUID_INTEGRATION_TEST_GROUP"
- mkdir -p $SHARED_DIR
- # Must start with an empty DB to keep MySQL happy
- rm -rf $SHARED_DIR/db
- mkdir -p $SHARED_DIR/logs
- mkdir -p $SHARED_DIR/tasklogs
- mkdir -p $SHARED_DIR/db
- mkdir -p $SHARED_DIR/kafka
- mkdir -p $SHARED_DIR/resources
- cp $MODULE_DIR/assets/log4j2.xml $SHARED_DIR/resources
- # Permissions in some build setups are screwed up. See above. The user
- # which runs Docker does not have permission to write into the /shared
- # directory. Force ownership to allow writing.
- chmod -R a+rwx $SHARED_DIR
+ build_override
+ build_shared_dir
cd $CLUSTER_DIR
docker-compose up -d
# Enable the following for debugging
- #show_status
+ show_status
;;
- "status")
+ "status" )
+ category $*
cd $CLUSTER_DIR
show_status
;;
- "down")
+ "down" )
+ category $*
# Enable the following for debugging
- #show_status
+ show_status
cd $CLUSTER_DIR
- docker-compose $CMD
+ echo OVERRIDE_ENV="$ENV_FILE" docker-compose $CMD
+ OVERRIDE_ENV="$ENV_FILE" docker-compose $CMD
;;
- "*")
+  * )
+ category $*
cd $CLUSTER_DIR
- docker-compose $CMD
+ OVERRIDE_ENV="$ENV_FILE" docker-compose $CMD
;;
esac
diff --git a/integration-tests-ex/cases/cluster/Common/druid.yaml b/integration-tests-ex/cases/cluster/Common/druid.yaml
index b8483c813172..bd5caad2232b 100644
--- a/integration-tests-ex/cases/cluster/Common/druid.yaml
+++ b/integration-tests-ex/cases/cluster/Common/druid.yaml
@@ -60,6 +60,7 @@ services:
env_file:
- environment-configs/common.env
- environment-configs/overlord.env
+ - ${OVERRIDE_ENV}
coordinator:
image: ${DRUID_IT_IMAGE_NAME}
@@ -76,6 +77,7 @@ services:
env_file:
- environment-configs/common.env
- environment-configs/coordinator.env
+ - ${OVERRIDE_ENV}
historical:
image: ${DRUID_IT_IMAGE_NAME}
@@ -92,6 +94,7 @@ services:
env_file:
- environment-configs/common.env
- environment-configs/historical.env
+ - ${OVERRIDE_ENV}
middlemanager:
image: ${DRUID_IT_IMAGE_NAME}
@@ -120,6 +123,7 @@ services:
env_file:
- environment-configs/common.env
- environment-configs/middlemanager.env
+ - ${OVERRIDE_ENV}
indexer:
image: ${DRUID_IT_IMAGE_NAME}
@@ -136,6 +140,7 @@ services:
env_file:
- environment-configs/common.env
- environment-configs/indexer.env
+ - ${OVERRIDE_ENV}
broker:
image: ${DRUID_IT_IMAGE_NAME}
@@ -152,6 +157,7 @@ services:
env_file:
- environment-configs/common.env
- environment-configs/broker.env
+ - ${OVERRIDE_ENV}
router:
image: ${DRUID_IT_IMAGE_NAME}
@@ -168,3 +174,4 @@ services:
env_file:
- environment-configs/common.env
- environment-configs/router.env
+ - ${OVERRIDE_ENV}
diff --git a/integration-tests-ex/cases/cluster/Common/environment-configs/common.env b/integration-tests-ex/cases/cluster/Common/environment-configs/common.env
index ad6f9e7d5fc5..fa89e492412b 100644
--- a/integration-tests-ex/cases/cluster/Common/environment-configs/common.env
+++ b/integration-tests-ex/cases/cluster/Common/environment-configs/common.env
@@ -41,13 +41,17 @@ DRUID_INSTANCE=
# Hostname
# druid.host is set on each host by the launch script
-# Extensions specified in the load list will be loaded by Druid
-# Extensions are installed as part of Druid. The prefix
-# must be the same as the value of $DRUID_HOME in the container.
-# Optional as this is the default?
-#druid_extensions_directory=/usr/local/druid/extensions
-#druid_extensions_loadList=["mysql-metadata-storage","druid-basic-security","simple-client-sslcontext","it-tools","druid-lookups-cached-global","druid-histogram","druid-datasketches","druid-parquet-extensions","druid-avro-extensions","druid-protobuf-extensions","druid-orc-extensions","druid-kafka-indexing-service","druid-s3-extensions"]
-druid_extensions_loadList=["mysql-metadata-storage","it-tools","druid-lookups-cached-global","druid-histogram","druid-datasketches","druid-parquet-extensions","druid-avro-extensions","druid-protobuf-extensions","druid-orc-extensions","druid-kafka-indexing-service","druid-s3-extensions"]
+# Extensions specified in the load list will be loaded by Druid at runtime.
+# The extension jars must be installed as part of Druid, or via the image
+# build script.
+#
+# The launch script creates druid_extensions_loadList by combining two test-specific
+# variables: druid_standard_loadList defined here, and druid_test_loadList, defined
+# in a docker-compose.yaml file, for any test-specific extensions.
+# See compose.md for more details.
+druid_standard_loadList=mysql-metadata-storage,it-tools,druid-lookups-cached-global,druid-histogram,druid-datasketches,druid-parquet-extensions,druid-avro-extensions,druid-protobuf-extensions,druid-orc-extensions,druid-kafka-indexing-service,druid-s3-extensions,druid-multi-stage-query
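+#
+# For example (hypothetical value): if a test's docker-compose.yaml sets
+#   druid_test_loadList=druid-azure-extensions
+# the launch script produces a druid_extensions_loadList containing the
+# standard list above plus druid-azure-extensions.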
+
+# Location of Hadoop dependencies provided at runtime in the shared directory.
druid_extensions_hadoopDependenciesDir=/shared/hadoop-dependencies
# Logging
@@ -108,3 +112,6 @@ druid_coordinator_period_metadataStoreManagementPeriod=PT10S
# Testing the legacy config from https://github.com/apache/druid/pull/10267
# Can remove this when the flag is no longer needed
druid_indexer_task_ignoreTimestampSpecForDruidInputSource=true
+
+
+# TODO: Pass this from the test (AzureDeepStorage)
diff --git a/integration-tests-ex/cases/cluster/MultiStageQuery/docker-compose.yaml b/integration-tests-ex/cases/cluster/MultiStageQuery/docker-compose.yaml
new file mode 100644
index 000000000000..f22ff3e6ddbe
--- /dev/null
+++ b/integration-tests-ex/cases/cluster/MultiStageQuery/docker-compose.yaml
@@ -0,0 +1,95 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+networks:
+ druid-it-net:
+ name: druid-it-net
+ ipam:
+ config:
+ - subnet: 172.172.172.0/24
+
+services:
+ zookeeper:
+ extends:
+ file: ../Common/dependencies.yaml
+ service: zookeeper
+
+ metadata:
+ extends:
+ file: ../Common/dependencies.yaml
+ service: metadata
+
+ coordinator:
+ extends:
+ file: ../Common/druid.yaml
+ service: coordinator
+ container_name: coordinator
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ - druid_manager_segments_pollDuration=PT5S
+ - druid_coordinator_period=PT10S
+ depends_on:
+ - zookeeper
+ - metadata
+
+ overlord:
+ extends:
+ file: ../Common/druid.yaml
+ service: overlord
+ container_name: overlord
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ depends_on:
+ - zookeeper
+ - metadata
+
+ broker:
+ extends:
+ file: ../Common/druid.yaml
+ service: broker
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ depends_on:
+ - zookeeper
+
+ router:
+ extends:
+ file: ../Common/druid.yaml
+ service: router
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ depends_on:
+ - zookeeper
+
+ historical:
+ extends:
+ file: ../Common/druid.yaml
+ service: historical
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ depends_on:
+ - zookeeper
+
+ indexer:
+ extends:
+ file: ../Common/druid.yaml
+ service: indexer
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ volumes:
+ # Test data
+ - ../../resources:/resources
+ depends_on:
+ - zookeeper
diff --git a/integration-tests-ex/cases/pom.xml b/integration-tests-ex/cases/pom.xml
index f1cb1b282daf..dae10239afa3 100644
--- a/integration-tests-ex/cases/pom.xml
+++ b/integration-tests-ex/cases/pom.xml
@@ -45,6 +45,12 @@
             <artifactId>druid-integration-tests</artifactId>
             <version>${project.parent.version}</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.druid.extensions</groupId>
+            <artifactId>druid-multi-stage-query</artifactId>
+            <version>${project.parent.version}</version>
+        </dependency>
         <dependency>
             <groupId>org.apache.druid</groupId>
@@ -74,10 +80,19 @@
             <artifactId>druid-services</artifactId>
             <version>${project.parent.version}</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.druid</groupId>
+            <artifactId>druid-indexing-service</artifactId>
+            <version>${project.parent.version}</version>
+        </dependency>
         <dependency>
             <groupId>com.google.inject</groupId>
             <artifactId>guice</artifactId>
         </dependency>
+        <dependency>
+            <groupId>com.google.inject.extensions</groupId>
+            <artifactId>guice-multibindings</artifactId>
+        </dependency>
         <dependency>
             <groupId>org.apache.curator</groupId>
             <artifactId>curator-framework</artifactId>
@@ -159,6 +174,24 @@
             <artifactId>mysql-metadata-storage</artifactId>
             <version>${project.parent.version}</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.druid.extensions</groupId>
+            <artifactId>druid-azure-extensions</artifactId>
+            <version>${project.parent.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.druid.extensions</groupId>
+            <artifactId>druid-hdfs-storage</artifactId>
+            <version>${project.parent.version}</version>
+            <scope>provided</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>com.amazonaws</groupId>
+                    <artifactId>aws-java-sdk-bundle</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
@@ -174,9 +207,18 @@
             <scope>test</scope>
         </dependency>
         <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
+            <groupId>com.google.code.findbugs</groupId>
+            <artifactId>jsr305</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>pl.pragmatists</groupId>
+            <artifactId>JUnitParams</artifactId>
+            <scope>test</scope>
         </dependency>
@@ -192,37 +234,72 @@
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-dependency-plugin</artifactId>
+                <configuration>
+                    <ignoredUsedUndeclaredDependencies>
+                        <ignoredUsedUndeclaredDependency>org.glassfish.hk2.external:jakarta.inject</ignoredUsedUndeclaredDependency>
+                    </ignoredUsedUndeclaredDependencies>
+                    <ignoredUnusedDeclaredDependencies>
+                        <ignoredUnusedDeclaredDependency>mysql:mysql-connector-java:jar</ignoredUnusedDeclaredDependency>
+                    </ignoredUnusedDeclaredDependencies>
+                </configuration>
+            </plugin>
         </plugins>
     </build>
 
     <profiles>
-        <profile>
-            <id>IT-HighAvailability</id>
+        <profile>
+            <id>IT-HighAvailability</id>
+            <activation>
+                <activeByDefault>false</activeByDefault>
+            </activation>
+            <properties>
+                <it.category>HighAvailability</it.category>
+            </properties>
+        </profile>
+        <profile>
+            <id>IT-BatchIndex</id>
+            <activation>
+                <activeByDefault>false</activeByDefault>
+            </activation>
+            <properties>
+                <it.category>BatchIndex</it.category>
+            </properties>
+        </profile>
+        <profile>
+            <id>IT-InputFormat</id>
             <activation>
                 <activeByDefault>false</activeByDefault>
             </activation>
-            <properties>
-                <it.category>HighAvailability</it.category>
-            </properties>
-        </profile>
-        <profile>
-            <id>IT-BatchIndex</id>
+            <properties>
+                <it.category>InputFormat</it.category>
+            </properties>
+        </profile>
+        <profile>
+            <id>IT-AzureDeepStorage</id>
             <activation>
                 <activeByDefault>false</activeByDefault>
             </activation>
-            <properties>
-                <it.category>BatchIndex</it.category>
-            </properties>
-        </profile>
-        <profile>
-            <id>IT-InputFormat</id>
+            <properties>
+                <it.category>AzureDeepStorage</it.category>
+            </properties>
+        </profile>
+        <profile>
+            <id>IT-MultiStageQuery</id>
             <activation>
                 <activeByDefault>false</activeByDefault>
             </activation>
-            <properties>
-                <it.category>InputFormat</it.category>
-            </properties>
-        </profile>
+            <properties>
+                <it.category>MultiStageQuery</it.category>
+            </properties>
+        </profile>
         <profile>
             <id>docker-tests</id>
@@ -281,8 +358,8 @@
                                 <executable>bash</executable>
                                 <arguments>
                                     <argument>cluster.sh</argument>
-                                    <argument>${it.category}</argument>
                                     <argument>up</argument>
+                                    <argument>${it.category}</argument>
                                 </arguments>
@@ -298,8 +375,8 @@
                                 <executable>bash</executable>
                                 <arguments>
                                     <argument>cluster.sh</argument>
-                                    <argument>${it.category}</argument>
                                     <argument>down</argument>
+                                    <argument>${it.category}</argument>
                                 </arguments>
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/categories/InputFormat.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/categories/MultiStageQuery.java
similarity index 82%
rename from integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/categories/InputFormat.java
rename to integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/categories/MultiStageQuery.java
index 6e8e182b0c15..e4a8fdc9a0ea 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/categories/InputFormat.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/categories/MultiStageQuery.java
@@ -19,12 +19,7 @@
package org.apache.druid.testsEx.categories;
-import org.apache.druid.testsEx.config.Cluster;
-
-/**
- * Input format category. Uses the same setup as {@link BatchIndex}.
- */
-@Cluster(BatchIndex.class)
-public class InputFormat
+public class MultiStageQuery
{
}
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/ClusterConfig.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/ClusterConfig.java
index 732746e18a25..363f7648b07f 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/ClusterConfig.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/ClusterConfig.java
@@ -73,6 +73,8 @@ public enum ClusterType
private KafkaConfig kafka;
@JsonProperty("druid")
   private Map<String, DruidConfig> druidServices;
+  @JsonProperty("settings")
+  private Map<String, Object> settings;
   @JsonProperty("properties")
   private Map<String, Object> properties;
@JsonProperty("metastoreInit")
@@ -117,6 +119,9 @@ public ClusterConfig(ClusterConfig from)
if (from.properties != null) {
this.properties = new HashMap<>(from.properties);
}
+ if (from.settings != null) {
+ this.settings = new HashMap<>(from.settings);
+ }
if (from.metastoreInit != null) {
this.metastoreInit = new ArrayList<>(from.metastoreInit);
}
@@ -157,9 +162,9 @@ public static ClusterConfig loadFromResource(String resource)
}
}
- public ResolvedConfig resolve()
+ public ResolvedConfig resolve(String clusterName)
{
- return new ResolvedConfig(resolveIncludes());
+ return new ResolvedConfig(clusterName, resolveIncludes());
}
public ClusterConfig resolveIncludes()
@@ -252,6 +257,13 @@ public Map<String, DruidConfig> druid()
return druidServices;
}
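+  /**
+   * Test-specific settings; the resolved config maps each entry to a
+   * druid.test.config.* property (see ClusterConfigTest).
+   */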
+ @JsonProperty("settings")
+ @JsonInclude(Include.NON_NULL)
+  public Map<String, Object> settings()
+ {
+ return settings;
+ }
+
@JsonProperty("properties")
@JsonInclude(Include.NON_NULL)
   public Map<String, Object> properties()
@@ -310,6 +322,11 @@ public ClusterConfig merge(ClusterConfig overrides)
} else if (overrides.druidServices != null) {
merged.druidServices.putAll(overrides.druidServices);
}
+ if (merged.settings == null) {
+ merged.settings = overrides.settings;
+ } else if (overrides.settings != null) {
+ merged.settings.putAll(overrides.settings);
+ }
if (merged.properties == null) {
merged.properties = overrides.properties;
} else if (overrides.properties != null) {
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/ClusterConfigTest.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/ClusterConfigTest.java
index e71bfd0584b5..1531edff0fd2 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/ClusterConfigTest.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/ClusterConfigTest.java
@@ -19,10 +19,20 @@
package org.apache.druid.testsEx.config;
+import org.apache.druid.testing.IntegrationTestingConfig;
import org.apache.druid.testsEx.config.ClusterConfig.ClusterType;
import org.apache.druid.testsEx.config.ResolvedService.ResolvedZk;
import org.junit.Test;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
+import java.util.Map;
+import java.util.Properties;
+
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
@@ -33,17 +43,17 @@
public class ClusterConfigTest
{
@Test
- public void testYaml()
+ public void testYaml() throws FileNotFoundException
{
ClusterConfig config = ClusterConfig.loadFromResource("/config-test/test.yaml");
// Uncomment this line to see the full config with includes resolved.
//System.out.println(config.resolveIncludes());
- ResolvedConfig resolved = config.resolve();
+ ResolvedConfig resolved = config.resolve("Test");
assertEquals(ClusterType.docker, resolved.type());
assertEquals(ResolvedConfig.DEFAULT_READY_TIMEOUT_SEC, resolved.readyTimeoutSec());
assertEquals(ResolvedConfig.DEFAULT_READY_POLL_MS, resolved.readyPollMs());
- assertEquals(1, resolved.properties().size());
+ assertEquals(3, resolved.properties().size());
ResolvedZk zk = resolved.zk();
assertNotNull(zk);
@@ -74,5 +84,40 @@ public void testYaml()
assertEquals("router", service.service());
assertEquals("http://localhost:8888", service.clientUrl());
assertEquals("http://localhost:8888", resolved.routerUrl());
+
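+    // Write a user-level override file (~/druid-it/Test.env); the resolved
+    // config folds such files into druid.test.config.* properties below.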
+ File userEnv = new File(
+ new File(
+ System.getProperty("user.home"),
+ "druid-it"),
+ "Test.env");
+ try (PrintWriter out = new PrintWriter(new OutputStreamWriter(new FileOutputStream(userEnv), StandardCharsets.UTF_8))) {
+ out.println("druid_user_var=user");
+ }
+
+ System.setProperty("druid_sys_prop", "sys");
+    Map<String, Object> props = resolved.toProperties();
+ // Added from ZK section
+ assertEquals("localhost:2181", props.get("druid.zk.service.zkHosts"));
+ // Generic property
+ assertEquals("howdy", props.get("my.test.property"));
+ // Mapped from settings
+ assertEquals("myBucket", props.get("druid.test.config.cloudBucket"));
+ assertEquals("myPath", props.get("druid.test.config.cloudPath"));
+ assertEquals("secret", props.get("druid.test.config.s3AccessKey"));
+ // From settings, overridden in properties
+ assertEquals("myRegion", props.get("druid.test.config.cloudRegion"));
+ // System property
+ assertEquals("sys", props.get("druid.test.config.sys_prop"));
+ // From user override
+ assertEquals("user", props.get("druid.test.config.user_var"));
+
+ // Test plumbing through the test config
+ Properties properties = new Properties();
+ properties.putAll(props);
+ IntegrationTestingConfig testingConfig = new IntegrationTestingConfigEx(resolved, properties);
+ assertEquals("myBucket", testingConfig.getCloudBucket());
+ assertEquals("myPath", testingConfig.getCloudPath());
+ // From settings, overridden in properties
+ assertEquals("myRegion", testingConfig.getCloudRegion());
}
}
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/DruidTestRunner.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/DruidTestRunner.java
index 0d44eb418f46..5e65ec7f9d4e 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/DruidTestRunner.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/DruidTestRunner.java
@@ -19,9 +19,9 @@
package org.apache.druid.testsEx.config;
+import junitparams.JUnitParamsRunner;
import org.apache.druid.java.util.common.UOE;
import org.junit.experimental.categories.Category;
-import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.Statement;
@@ -42,8 +42,10 @@
* test members before starting the lifecycle, so that the injection creates
* a reference, which creates the object, which registers it in the lifecycle. We
* should fix this issue. Until then, the awkwardness is hidden in this test runner.
+ *
+ * Extends the parameterized test runner so that Druid ITs can also use parameters.
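+ *
+ * Typical usage (the test class name is illustrative):
+ * <pre>{@code
+ * @RunWith(DruidTestRunner.class)
+ * @Category(BatchIndex.class)
+ * public class ITMyTest
+ * {
+ *   ...
+ * }
+ * }</pre>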
*/
-public class DruidTestRunner extends BlockJUnit4ClassRunner
+public class DruidTestRunner extends JUnitParamsRunner
{
private class CloseInitializer extends Statement
{
@@ -87,7 +89,7 @@ protected Object createTest() throws Exception
private Initializer buildInitializer(Object test)
{
     Class<?> testClass = test.getClass();
- Category annotations[] = testClass.getAnnotationsByType(Category.class);
+ Category[] annotations = testClass.getAnnotationsByType(Category.class);
if (annotations.length == 0) {
throw new UOE(
"Class % must have a @Category annotation",
@@ -100,7 +102,7 @@ private Initializer buildInitializer(Object test)
testClass.getSimpleName()
);
}
-    Class<?> categories[] = annotations[0].value();
+    Class<?>[] categories = annotations[0].value();
if (categories.length == 0) {
throw new UOE(
"Class % must have a @Category value",
@@ -148,7 +150,7 @@ private Initializer buildInitializer(Object test)
*/
   private Class<?> category(Class<?> testClass)
{
- Category annotations[] = testClass.getAnnotationsByType(Category.class);
+ Category[] annotations = testClass.getAnnotationsByType(Category.class);
if (annotations.length == 0) {
throw new UOE(
"Class % must have a @Category annotation",
@@ -161,7 +163,7 @@ private Class<?> category(Class<?> testClass)
testClass.getSimpleName()
);
}
-    Class<?> categories[] = annotations[0].value();
+    Class<?>[] categories = annotations[0].value();
if (categories.length == 0) {
throw new UOE(
"Class % must have a @Category value",
@@ -184,7 +186,7 @@ private Class> category(Class> testClass)
private String inferCluster(Class> category)
{
String categoryName = category.getSimpleName();
- Cluster annotations[] = category.getAnnotationsByType(Cluster.class);
+ Cluster[] annotations = category.getAnnotationsByType(Cluster.class);
if (annotations.length == 0) {
return categoryName;
}
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/Initializer.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/Initializer.java
index bcf7ba7666af..a2899a084488 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/Initializer.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/Initializer.java
@@ -70,11 +70,13 @@
import org.apache.druid.metadata.storage.mysql.MySQLMetadataStorageModule;
import org.apache.druid.server.DruidNode;
import org.apache.druid.testing.IntegrationTestingConfig;
+import org.apache.druid.testing.IntegrationTestingConfigProvider;
import org.apache.druid.testing.guice.TestClient;
import org.apache.druid.testsEx.cluster.DruidClusterClient;
import org.apache.druid.testsEx.cluster.MetastoreClient;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -132,7 +134,8 @@ public void configure(Binder binder)
.toInstance(config);
binder
.bind(IntegrationTestingConfig.class)
- .toInstance(config.toIntegrationTestingConfig());
+ .to(IntegrationTestingConfigEx.class)
+ .in(LazySingleton.class);
binder
.bind(MetastoreClient.class)
.in(LazySingleton.class);
@@ -265,15 +268,24 @@ public Builder(String clusterName)
property("druid.client.https.keyManagerPassword", "druid123");
property("druid.client.https.keyStorePassword", "druid123");
+ // More env var bindings for properties formerly passed in via
+ // a generated config file.
+ final String base = IntegrationTestingConfigProvider.PROPERTY_BASE + ".";
+ propertyEnvVarBinding(base + "cloudBucket", "DRUID_CLOUD_BUCKET");
+ propertyEnvVarBinding(base + "cloudPath", "DRUID_CLOUD_PATH");
+ propertyEnvVarBinding(base + "s3AccessKey", "AWS_ACCESS_KEY_ID");
+ propertyEnvVarBinding(base + "s3SecretKey", "AWS_SECRET_ACCESS_KEY");
+ propertyEnvVarBinding(base + "azureContainer", "AZURE_CONTAINER");
+ propertyEnvVarBinding(base + "azureAccount", "AZURE_ACCOUNT");
+ propertyEnvVarBinding(base + "azureKey", "AZURE_KEY");
+ propertyEnvVarBinding(base + "googleBucket", "GOOGLE_BUCKET");
+ propertyEnvVarBinding(base + "googlePrefix", "GOOGLE_PREFIX");
+
// Other defaults
// druid.global.http.numMaxThreads avoids creating 40+ Netty threads.
// We only ever use 1.
property("druid.global.http.numMaxThreads", 3);
property("druid.broker.http.numMaxThreads", 3);
-
- // druid.test.config.dockerIp is used by some older test code. Remove
- // it when that code is updated.
- property("druid.test.config.dockerIp", "localhost");
}
/**
@@ -323,10 +335,7 @@ public Builder modules(List modules)
public Builder modules(Module...modules)
{
- for (Module module : modules) {
- this.modules.add(module);
- }
- return this;
+ return modules(Arrays.asList(modules));
}
/**
@@ -388,7 +397,7 @@ public synchronized Initializer build()
private Initializer(Builder builder)
{
if (builder.configFile != null) {
- this.clusterConfig = loadConfigFile(builder.configFile);
+ this.clusterConfig = loadConfigFile(builder.clusterName, builder.configFile);
} else {
this.clusterConfig = loadConfig(builder.clusterName, builder.configFile);
}
@@ -443,13 +452,13 @@ private static ResolvedConfig loadConfig(String category, String configName)
}
String loadName = StringUtils.format(CLUSTER_CONFIG_RESOURCE, category, configName);
ClusterConfig config = ClusterConfig.loadFromResource(loadName);
- return config.resolve();
+ return config.resolve(category);
}
- private static ResolvedConfig loadConfigFile(String path)
+ private static ResolvedConfig loadConfigFile(String category, String path)
{
ClusterConfig config = ClusterConfig.loadFromFile(path);
- return config.resolve();
+ return config.resolve(category);
}
private static Injector makeInjector(
@@ -488,7 +497,7 @@ private static Injector makeInjector(
}
/**
- * Define test properties similar to how the server does. Property precendence
+ * Define test properties similar to how the server does. Property precedence
* is:
*
   * <ul>
   * <li>System properties (highest)</li>
@@ -512,6 +521,8 @@ private static Properties properties(
}
}
finalProperties.putAll(System.getProperties());
+ log.info("Properties:");
+ log.info(finalProperties.toString());
return finalProperties;
}
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/IntegrationTestingConfigEx.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/IntegrationTestingConfigEx.java
new file mode 100644
index 000000000000..c14ea745aa81
--- /dev/null
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/IntegrationTestingConfigEx.java
@@ -0,0 +1,421 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testsEx.config;
+
+import com.google.common.collect.ImmutableMap;
+import org.apache.druid.java.util.common.ISE;
+import org.apache.druid.testing.IntegrationTestingConfig;
+import org.apache.druid.testing.IntegrationTestingConfigProvider;
+
+import javax.inject.Inject;
+
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * Adapter to the "legacy" cluster configuration used by tests.
+ */
+class IntegrationTestingConfigEx implements IntegrationTestingConfig
+{
+ private final ResolvedConfig config;
+  private final Map<String, Object> properties;
+
+ @Inject
+ public IntegrationTestingConfigEx(
+ final ResolvedConfig config,
+ final Properties properties)
+ {
+ this.config = config;
+    ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder();
+ for (Map.Entry