diff --git a/.github/workflows/ci-integration.yml b/.github/workflows/ci-integration.yml
index 80940cc9..75d3aa7c 100644
--- a/.github/workflows/ci-integration.yml
+++ b/.github/workflows/ci-integration.yml
@@ -64,21 +64,46 @@ jobs:
psutil
""" > requirements.txt
- name: Set up Python
- uses: actions/setup-python@v3
+ uses: MatteoH2O1999/setup-python@v1
with:
- python-version: '2.x'
+ python-version: '2.7'
cache: 'pip'
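+ # allow-build/cache-build let the action build CPython 2.7 from source and cache the result,
+ # since prebuilt 2.7 binaries are no longer shipped for current runners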
+ allow-build: info
+ cache-build: true
- name: Setup CCM private
if: ${{ matrix.cassandra.is_dse }}
env:
CLONE_RIPTANO_CCM_SECRET: ${{secrets.CLONE_RIPTANO_CCM_SECRET}}
+ TEST: ${{secrets.TEST}}
+ GB_TOKEN: ${{secrets.GB_TOKEN}}
run: |
#!/bin/bash
set -e
- git clone https://${CLONE_RIPTANO_CCM_SECRET}@github.com/riptano/ccm-private.git
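+ # Debug: report whether each required secret is actually exposed to this step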
+ if [ "GB_TOKEN" == "" ]; then
+ echo "GB_TOKEN=NO"
+ else
+ echo "GB_TOKEN=YES"
+ fi
+
+ echo $TEST
+ if [ "$TEST" == "" ]; then
+ echo "envsecret=NO"
+ else
+ echo "envsecret=YES"
+ fi
+
+ if [ "$CLONE_RIPTANO_CCM_SECRET" == "" ]; then
+ echo "secretspresent=NO"
+ else
+ echo "secretspresent=YES"
+ fi
+ echo "Cloning the repo"
+ git clone https://${GB_TOKEN}:x-oauth-basic@github.com/riptano/ccm-private.git
cd ccm-private
+ echo "pip install requirements"
pip install -r requirements.txt
+ echo "setup.py install"
./setup.py install
- name: Setup CCM
if: ${{ !matrix.cassandra.is_dse }}
@@ -117,7 +142,7 @@ jobs:
export CCM_CASSANDRA_VERSION=${{ matrix.cassandra.ccm_version }}
export CCM_VERSION=${{ matrix.cassandra.ccm_version }}
- mvn verify -Pmedium -pl pulsar-impl \
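+ # run the "medium" integration-test profile against the sink module only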
+ mvn verify -Pmedium -pl sink \
-Ddsbulk.ccm.CCM_VERSION=$CCM_VERSION \
-Ddsbulk.ccm.CCM_IS_DSE=$CCM_IS_DSE \
-Ddsbulk.ccm.JAVA_HOME="$JDK8_PATH" \
@@ -146,7 +171,7 @@ jobs:
env
- mvn verify -Pmedium -pl tests \
+ mvn verify -Pmedium -pl sink \
-Ddsbulk.ccm.CCM_VERSION=$CCM_VERSION \
-Ddsbulk.ccm.CCM_IS_DSE=$CCM_IS_DSE \
-Ddsbulk.ccm.JAVA_HOME="$JDK8_PATH" \
diff --git a/pom.xml b/pom.xml
index 48d4f61f..d7f4b215 100644
--- a/pom.xml
+++ b/pom.xml
@@ -40,12 +40,12 @@
1.0.15
1.8
8
- 2.4.0
- 5.2.1
+ 3.7.2
+ 7.7.2
2.6.2
4.16.0
1.10.0
- 1.0.3
+ 1.0.4
25.1-jre
1.7.25
1.2.3
@@ -61,8 +61,8 @@
2
1.3
4.0.1
- 4.1.39.Final
- 2.0.25.Final
+ 4.1.77.Final
+ 2.0.52.Final
4.0.2
1.6.0
1.1.7.2
diff --git a/sink/src/it/java/com/datastax/oss/kafka/sink/cloud/CloudSniEndToEndIT.java b/sink/src/it/java/com/datastax/oss/kafka/sink/cloud/CloudSniEndToEndIT.java
index a3d65ede..0d4d2530 100644
--- a/sink/src/it/java/com/datastax/oss/kafka/sink/cloud/CloudSniEndToEndIT.java
+++ b/sink/src/it/java/com/datastax/oss/kafka/sink/cloud/CloudSniEndToEndIT.java
@@ -48,6 +48,7 @@
import org.apache.kafka.connect.sink.SinkRecord;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
@@ -59,6 +60,8 @@
@ExtendWith(WiremockResolver.class)
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@Tag("medium")
+// per https://github.com/datastax/dsbulk/commit/0c91fdc4aa3e867decf4f080353b3bba468ce911
+@Disabled(value = "SNI Proxy image not available anymore in CI")
public class CloudSniEndToEndIT extends ITConnectorBase {
private final SNIProxyServer proxy;
diff --git a/sink/src/it/java/com/datastax/oss/kafka/sink/simulacron/SimpleEndToEndSimulacronIT.java b/sink/src/it/java/com/datastax/oss/kafka/sink/simulacron/SimpleEndToEndSimulacronIT.java
index cf24053b..32f75bd8 100644
--- a/sink/src/it/java/com/datastax/oss/kafka/sink/simulacron/SimpleEndToEndSimulacronIT.java
+++ b/sink/src/it/java/com/datastax/oss/kafka/sink/simulacron/SimpleEndToEndSimulacronIT.java
@@ -85,7 +85,7 @@
@ExtendWith(StreamInterceptingExtension.class)
@ExtendWith(LogInterceptingExtension.class)
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
-@SimulacronConfig(dseVersion = "5.0.8")
+@SimulacronConfig
class SimpleEndToEndSimulacronIT {
private static final String INSERT_STATEMENT =
@@ -332,14 +332,7 @@ void fail_prepare_counter_table() {
ImmutableMap<String, String> props =
ImmutableMap.<String, String>builder()
- .put("name", INSTANCE_NAME)
- .put("contactPoints", connectorProperties.get("contactPoints"))
- .put("port", connectorProperties.get("port"))
- .put("loadBalancing.localDc", "dc1")
- // since we upgraded to Driver 4.16.x, we need to explicitly set the protocol version
- // otherwise it will try only DSE_v1 and DSE_v2 because they are not considered "BETA"
- // https://github.com/datastax/java-driver/blob/4270f93277249abb513bc2abf2ff7a7c481b1d0d/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java#L163
- .put("datastax-java-driver.advanced.protocol.version", "V4")
+ .putAll(connectorProperties)
.put("topic.mytopic.ks1.mycounter.mapping", "a=key, b=value, c=value.f2")
.build();
assertThatThrownBy(() -> task.start(props))
@@ -367,14 +360,7 @@ void fail_delete() {
.build()));
simulacron.prime(when(bad1).then(serverError("bad thing")));
Map<String, String> connProps = new HashMap<>();
- connProps.put("name", INSTANCE_NAME);
- connProps.put("contactPoints", hostname);
- // since we upgraded to Driver 4.16.x, we need to explicitly set the protocol version
- // otherwise it will try only DSE_v1 and DSE_v2 because they are not considered "BETA"
- // https://github.com/datastax/java-driver/blob/4270f93277249abb513bc2abf2ff7a7c481b1d0d/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java#L163
- connProps.put("datastax-java-driver.advanced.protocol.version", "V4");
- connProps.put("port", port);
- connProps.put("loadBalancing.localDc", "dc1");
+ connProps.putAll(connectorProperties);
connProps.put(
"topic.mytopic.ks1.mycounter.mapping", "a=value.bigint, b=value.text, c=value.int");
diff --git a/sink/src/test/java/com/datastax/oss/kafka/sink/AvroJsonConvertersTest.java b/sink/src/test/java/com/datastax/oss/kafka/sink/AvroJsonConvertersTest.java
index 1996dab6..d926b86f 100644
--- a/sink/src/test/java/com/datastax/oss/kafka/sink/AvroJsonConvertersTest.java
+++ b/sink/src/test/java/com/datastax/oss/kafka/sink/AvroJsonConvertersTest.java
@@ -25,23 +25,17 @@
import com.fasterxml.jackson.databind.node.DoubleNode;
import com.fasterxml.jackson.databind.node.TextNode;
import io.confluent.connect.avro.AvroConverter;
-import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
+import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient;
import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig;
-import io.confluent.kafka.serializers.AvroSchemaUtils;
-import java.io.ByteArrayInputStream;
import java.math.BigDecimal;
import java.math.BigInteger;
-import java.nio.ByteBuffer;
import java.util.Base64;
import java.util.Collections;
import java.util.Map;
import java.util.stream.Stream;
-import org.apache.avro.generic.GenericDatumReader;
-import org.apache.avro.io.BinaryDecoder;
-import org.apache.avro.io.DatumReader;
-import org.apache.avro.io.DecoderFactory;
import org.apache.kafka.connect.data.Decimal;
import org.apache.kafka.connect.data.Schema;
+import org.apache.kafka.connect.data.SchemaAndValue;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.json.DecimalFormat;
import org.apache.kafka.connect.json.JsonConverter;
@@ -49,7 +43,6 @@
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
-import org.mockito.Mockito;
/**
* Tests to validate discussion in KAF-91. These tests are intended to prove that the behaviour
@@ -161,7 +154,7 @@ public void should_keep_big_decimal_with_json_converter_decimal_format_numeric()
public void should_convert_big_decimal_to_bytes_with_avro_converter() throws Exception {
String topic = "topic";
- AvroConverter converter = new AvroConverter(Mockito.mock(SchemaRegistryClient.class));
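+ // MockSchemaRegistryClient keeps registered schemas in memory, so no Mockito stubbing is needed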
+ AvroConverter converter = new AvroConverter(new MockSchemaRegistryClient());
converter.configure(
Collections.singletonMap(
AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "localhost"),
@@ -172,29 +165,15 @@ public void should_convert_big_decimal_to_bytes_with_avro_converter() throws Exc
new SchemaBuilder(Schema.Type.BYTES)
.name(Decimal.LOGICAL_NAME)
.parameter(Decimal.SCALE_FIELD, Integer.toString(expected.scale()))
+ .required()
.build();
- // Root conversion operation
byte[] convertedBytes = converter.fromConnectData(topic, schema, expected);
- // AvroConverter winds up adding 5 extra bytes, a "magic" byte + a 4 byte ID value, so strip
- // those here. See AbstractKafkaAvroSerializer for more detail.
- ByteArrayInputStream stream =
- new ByteArrayInputStream(convertedBytes, 5, convertedBytes.length - 5);
-
- org.apache.avro.Schema bytesSchema = AvroSchemaUtils.getSchema(convertedBytes);
-
// Confirm that we can read the contents of the connect data as a byte array (not by itself an
// impressive feat) _and_ that the bytes in this array represent the unscaled value of the
// expected BigInteger
- BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(stream, null);
- DatumReader