From 153828889359e90563387a7e2c262e12d2ccf6e5 Mon Sep 17 00:00:00 2001
From: David Venable
Date: Mon, 1 Jul 2024 15:54:53 -0500
Subject: [PATCH 1/7] Updates our usage of the Apache Parquet project to use
 their new interfaces over the old Hadoop ones.

This allows us to be ready to extract Hadoop as other changes are made to
the Parquet project. Removes some Hadoop transitive dependencies and makes
Hadoop runtime-only where possible. Adds a test for INT96 and cleans up
some test files.

Contributes toward #4612. (#4623)

Signed-off-by: David Venable
---
 .../parquet-codecs/build.gradle               | 16 +++--
 .../codec/parquet/ParquetInputCodec.java      | 10 +--
 .../codec/parquet/ParquetInputCodecTest.java  | 64 +++++++++++++++++-
 .../resources/sample.snappy.parquet           | Bin
 .../resources/test-parquet.parquet            | Bin
 data-prepper-plugins/s3-sink/build.gradle     |  5 +-
 .../sink/s3/ParquetOutputScenario.java        |  3 +-
 .../plugins/sink/s3/S3SinkServiceIT.java      |  7 +-
 .../codec/parquet/ParquetOutputCodecTest.java |  7 +-
 9 files changed, 92 insertions(+), 20 deletions(-)
 rename data-prepper-plugins/parquet-codecs/src/{main => test}/resources/sample.snappy.parquet (100%)
 rename data-prepper-plugins/parquet-codecs/src/{main => test}/resources/test-parquet.parquet (100%)
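Before the diffs themselves, a minimal sketch of the API shift this patch makes: records are read through Parquet's own `ParquetConfiguration` rather than Hadoop's `Configuration`. This is an illustration, not code from the patch; the class and method names (`ParquetReadSketch`, `readAll`) are invented for the example, while the Parquet calls are the ones the patch adopts in `ParquetInputCodec`.

```java
import org.apache.avro.generic.GenericRecord;
import org.apache.parquet.avro.AvroParquetReader;
import org.apache.parquet.conf.ParquetConfiguration;
import org.apache.parquet.conf.PlainParquetConfiguration;
import org.apache.parquet.hadoop.ParquetReader;
import org.apache.parquet.io.InputFile;

import java.io.IOException;

class ParquetReadSketch {
    static void readAll(final InputFile inputFile) throws IOException {
        // Parquet's own PlainParquetConfiguration replaces
        // org.apache.hadoop.conf.Configuration, so no Hadoop class appears here.
        final ParquetConfiguration configuration = new PlainParquetConfiguration();

        // The two-argument builder overload accepts the ParquetConfiguration
        // directly, replacing the old builder(inputFile).withConf(hadoopConf) chain.
        try (ParquetReader<GenericRecord> reader =
                     AvroParquetReader.<GenericRecord>builder(inputFile, configuration).build()) {
            GenericRecord record;
            while ((record = reader.read()) != null) {
                System.out.println(record); // each record is an Avro GenericRecord
            }
        }
    }
}
```

Keeping Hadoop off the compile-time classpath in this way is what allows the build files below to demote hadoop-common and hadoop-mapreduce to runtimeOnly.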
diff --git a/data-prepper-plugins/parquet-codecs/build.gradle b/data-prepper-plugins/parquet-codecs/build.gradle
index ea783c53d4..dd59e28068 100644
--- a/data-prepper-plugins/parquet-codecs/build.gradle
+++ b/data-prepper-plugins/parquet-codecs/build.gradle
@@ -7,16 +7,24 @@ dependencies {
     implementation project(':data-prepper-api')
     implementation project(':data-prepper-plugins:common')
     implementation libs.avro.core
-    implementation libs.hadoop.common
-    implementation(libs.hadoop.mapreduce) {
-        exclude group: 'org.apache.hadoop', module: 'hadoop-hdfs-client'
-    }
+    implementation 'org.apache.commons:commons-text:1.11.0'
     implementation 'org.apache.parquet:parquet-avro:1.14.0'
     implementation 'org.apache.parquet:parquet-column:1.14.0'
     implementation 'org.apache.parquet:parquet-common:1.14.0'
     implementation 'org.apache.parquet:parquet-hadoop:1.14.0'
+    runtimeOnly(libs.hadoop.common) {
+        exclude group: 'org.eclipse.jetty'
+        exclude group: 'org.apache.hadoop', module: 'hadoop-auth'
+    }
+    runtimeOnly(libs.hadoop.mapreduce) {
+        exclude group: 'org.apache.hadoop', module: 'hadoop-hdfs-client'
+    }
     testImplementation project(':data-prepper-test-common')
     testImplementation project(':data-prepper-test-event')
+    testImplementation(libs.hadoop.common) {
+        exclude group: 'org.eclipse.jetty'
+        exclude group: 'org.apache.hadoop', module: 'hadoop-auth'
+    }
 
     constraints {
         implementation('com.nimbusds:nimbus-jose-jwt') {
diff --git a/data-prepper-plugins/parquet-codecs/src/main/java/org/opensearch/dataprepper/plugins/codec/parquet/ParquetInputCodec.java b/data-prepper-plugins/parquet-codecs/src/main/java/org/opensearch/dataprepper/plugins/codec/parquet/ParquetInputCodec.java
index fa9876f114..e85e0c9926 100644
--- a/data-prepper-plugins/parquet-codecs/src/main/java/org/opensearch/dataprepper/plugins/codec/parquet/ParquetInputCodec.java
+++ b/data-prepper-plugins/parquet-codecs/src/main/java/org/opensearch/dataprepper/plugins/codec/parquet/ParquetInputCodec.java
@@ -6,8 +6,9 @@
 package org.opensearch.dataprepper.plugins.codec.parquet;
 
 import org.apache.avro.generic.GenericRecord;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.parquet.avro.AvroParquetReader;
+import org.apache.parquet.conf.ParquetConfiguration;
+import org.apache.parquet.conf.PlainParquetConfiguration;
 import org.apache.parquet.hadoop.ParquetReader;
 import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin;
 import org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor;
@@ -46,13 +47,13 @@ public class ParquetInputCodec implements InputCodec {
 
     private static final Logger LOG = LoggerFactory.getLogger(ParquetInputCodec.class);
 
-    private final Configuration configuration;
+    private final ParquetConfiguration configuration;
     private final EventFactory eventFactory;
 
     @DataPrepperPluginConstructor
     public ParquetInputCodec(final EventFactory eventFactory) {
         this.eventFactory = eventFactory;
-        configuration = new Configuration();
+        configuration = new PlainParquetConfiguration();
         configuration.setBoolean(READ_INT96_AS_FIXED, true);
     }
 
@@ -80,8 +81,7 @@ public void parse(final InputFile inputFile, final DecompressionEngine decompres
     }
 
     private void parseParquetFile(final InputFile inputFile, final Consumer<Record<Event>> eventConsumer) throws IOException {
-        try (ParquetReader<GenericRecord> reader = AvroParquetReader.<GenericRecord>builder(inputFile)
-                .withConf(this.configuration)
+        try (ParquetReader<GenericRecord> reader = AvroParquetReader.<GenericRecord>builder(inputFile, this.configuration)
                 .build()) {
             GenericRecordJsonEncoder encoder = new GenericRecordJsonEncoder();
             GenericRecord record = null;
diff --git a/data-prepper-plugins/parquet-codecs/src/test/java/org/opensearch/dataprepper/plugins/codec/parquet/ParquetInputCodecTest.java b/data-prepper-plugins/parquet-codecs/src/test/java/org/opensearch/dataprepper/plugins/codec/parquet/ParquetInputCodecTest.java
index 1510ad75cc..5ae5f82d0d 100644
--- a/data-prepper-plugins/parquet-codecs/src/test/java/org/opensearch/dataprepper/plugins/codec/parquet/ParquetInputCodecTest.java
+++ b/data-prepper-plugins/parquet-codecs/src/test/java/org/opensearch/dataprepper/plugins/codec/parquet/ParquetInputCodecTest.java
@@ -8,8 +8,17 @@
 import org.apache.avro.generic.GenericData;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.parquet.avro.AvroParquetWriter;
+import org.apache.parquet.conf.PlainParquetConfiguration;
+import org.apache.parquet.example.data.Group;
+import org.apache.parquet.example.data.simple.NanoTime;
+import org.apache.parquet.example.data.simple.SimpleGroup;
 import org.apache.parquet.hadoop.ParquetReader;
 import org.apache.parquet.hadoop.ParquetWriter;
+import org.apache.parquet.hadoop.example.ExampleParquetWriter;
+import org.apache.parquet.hadoop.example.GroupWriteSupport;
+import org.apache.parquet.schema.MessageType;
+import org.apache.parquet.schema.PrimitiveType;
+import org.apache.parquet.schema.Type;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
@@ -33,12 +42,15 @@
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.nio.file.Paths;
+import java.time.OffsetDateTime;
+import java.time.temporal.JulianFields;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.function.Consumer;
 
+import static org.apache.parquet.avro.AvroWriteSupport.WRITE_FIXED_AS_INT96;
 import static org.hamcrest.CoreMatchers.notNullValue;
 import static org.hamcrest.CoreMatchers.startsWith;
 import static org.hamcrest.MatcherAssert.assertThat;
@@ -161,6 +173,22 @@ public void parseInputFile_parsesCorrectly() throws IOException {
         assertRecordsCorrect(actualRecords);
     }
 
+    @Test
+    public void parseInputStream_parsesCorrectly_with_int96() throws IOException {
+        final File testDataFile = File.createTempFile(FILE_PREFIX + "-int96-", FILE_SUFFIX);
+        testDataFile.deleteOnExit();
+        generateTestDataInt96(testDataFile);
+        InputStream targetStream = new FileInputStream(testDataFile);
+
+        parquetInputCodec.parse(targetStream, mockConsumer);
+
+        final ArgumentCaptor<Record<Event>> recordArgumentCaptor = ArgumentCaptor.forClass(Record.class);
+        verify(mockConsumer, times(10)).accept(recordArgumentCaptor.capture());
+
+        final List<Record<Event>> actualRecords = recordArgumentCaptor.getAllValues();
+        assertThat(actualRecords.size(), equalTo(10));
+    }
+
     @Test
     public void parseInputFile_snappyInputFile() throws IOException, URISyntaxException {
         URL resource = getClass().getClassLoader().getResource("sample.snappy.parquet");
@@ -203,8 +231,10 @@ public void parseInputFile_testParquetFile() throws IOException, URISyntaxExcept
 
     private static void generateTestData(final File file) throws IOException {
         Schema schema = new Schema.Parser().parse(SCHEMA_JSON);
-        ParquetWriter<GenericRecord> writer = AvroParquetWriter.<GenericRecord>builder(new LocalOutputFile(file))
+        final ParquetWriter<GenericRecord> writer = AvroParquetWriter.<GenericRecord>builder(new LocalOutputFile(file))
                 .withSchema(schema)
+                .withConf(new PlainParquetConfiguration())
+                .withEncryption(null)
                 .build();
 
         for (int i = 0; i < 10; i++) {
@@ -220,6 +250,34 @@
         writer.close();
     }
 
+    /**
+     * Generates a Parquet file with INT96 data. This must use the example
+     * schema rather than Avro, or it would not correctly reproduce the
+     * possible INT96 error.
+     *
+     * @param file The file for the Parquet data
+     */
+    private static void generateTestDataInt96(final File file) throws IOException {
+        final MessageType schema = new MessageType("test", List.of(
+                new PrimitiveType(Type.Repetition.OPTIONAL, PrimitiveType.PrimitiveTypeName.INT96, "my_timestamp_value")
+        ));
+        final PlainParquetConfiguration conf = new PlainParquetConfiguration();
+        conf.setStrings(WRITE_FIXED_AS_INT96, "my_timestamp_value");
+        conf.set(GroupWriteSupport.PARQUET_EXAMPLE_SCHEMA, schema.toString());
+        final ParquetWriter<Group> writer = ExampleParquetWriter.builder(new LocalOutputFile(file))
+                .withConf(conf)
+                .withEncryption(null)
+                .build();
+
+        for (int i = 0; i < 10; i++) {
+            final Group group = new SimpleGroup(schema);
+            group.add("my_timestamp_value", createInt96());
+
+            writer.write(group);
+        }
+        writer.close();
+    }
+
     private void assertRecordsCorrect(final List<Record<Event>> records) {
         assertThat(records.size(), equalTo(10));
         for (int i = 0; i < 10; i++) {
@@ -240,5 +298,9 @@ private void assertRecordsCorrect(final List<Record<Event>> records) {
             assertThat(record.getData().getMetadata().getEventType(), equalTo(EVENT_TYPE));
         }
     }
+
+    private static NanoTime createInt96() {
+        return new NanoTime((int) OffsetDateTime.now().getLong(JulianFields.JULIAN_DAY), System.nanoTime());
+    }
 }
diff --git a/data-prepper-plugins/parquet-codecs/src/main/resources/sample.snappy.parquet b/data-prepper-plugins/parquet-codecs/src/test/resources/sample.snappy.parquet
similarity index 100%
rename from data-prepper-plugins/parquet-codecs/src/main/resources/sample.snappy.parquet
rename to data-prepper-plugins/parquet-codecs/src/test/resources/sample.snappy.parquet
diff --git a/data-prepper-plugins/parquet-codecs/src/main/resources/test-parquet.parquet b/data-prepper-plugins/parquet-codecs/src/test/resources/test-parquet.parquet
similarity index 100%
rename from data-prepper-plugins/parquet-codecs/src/main/resources/test-parquet.parquet
rename to data-prepper-plugins/parquet-codecs/src/test/resources/test-parquet.parquet
diff --git a/data-prepper-plugins/s3-sink/build.gradle b/data-prepper-plugins/s3-sink/build.gradle
index 5a6b174900..638b8246a5 100644
--- a/data-prepper-plugins/s3-sink/build.gradle
+++ b/data-prepper-plugins/s3-sink/build.gradle
@@ -19,7 +19,10 @@ dependencies {
     implementation 'org.jetbrains.kotlin:kotlin-stdlib:1.9.22'
     implementation project(':data-prepper-plugins:avro-codecs')
     implementation libs.avro.core
-    implementation libs.hadoop.common
+    implementation(libs.hadoop.common) {
+        exclude group: 'org.eclipse.jetty'
+        exclude group: 'org.apache.hadoop', module: 'hadoop-auth'
+    }
     implementation 'org.apache.parquet:parquet-avro:1.14.0'
     implementation 'software.amazon.awssdk:apache-client'
     implementation 'org.jetbrains.kotlin:kotlin-stdlib-common:1.9.22'
diff --git a/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/ParquetOutputScenario.java b/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/ParquetOutputScenario.java
index 6e3abc3250..e01c61fe09 100644
--- a/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/ParquetOutputScenario.java
+++ b/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/ParquetOutputScenario.java
@@ -9,6 +9,7 @@
 import org.apache.avro.util.Utf8;
 import org.apache.parquet.ParquetReadOptions;
 import org.apache.parquet.avro.AvroParquetReader;
+import org.apache.parquet.conf.PlainParquetConfiguration;
 import org.apache.parquet.hadoop.ParquetFileReader;
 import org.apache.parquet.hadoop.ParquetReader;
 import org.apache.parquet.hadoop.metadata.BlockMetaData;
@@ -65,7 +66,7 @@ public void validate(int expectedRecords, final List<Map<String, Object>> sample
         int validatedRecords = 0;
 
         int count = 0;
-        try (final ParquetReader<GenericRecord> reader = AvroParquetReader.<GenericRecord>builder(inputFile)
+        try (final ParquetReader<GenericRecord> reader = AvroParquetReader.<GenericRecord>builder(inputFile, new PlainParquetConfiguration())
                 .build()) {
             GenericRecord record;
diff --git a/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/S3SinkServiceIT.java b/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/S3SinkServiceIT.java
index 739ac876df..b7bbb1b97d 100644
--- a/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/S3SinkServiceIT.java
+++ b/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/S3SinkServiceIT.java
@@ -12,8 +12,6 @@
 import org.apache.avro.Schema;
 import org.apache.avro.SchemaBuilder;
 import org.apache.commons.io.IOUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
 import org.apache.parquet.ParquetReadOptions;
 import org.apache.parquet.column.page.PageReadStore;
 import org.apache.parquet.example.data.Group;
@@ -21,8 +19,8 @@
 import org.apache.parquet.example.data.simple.convert.GroupRecordConverter;
 import org.apache.parquet.hadoop.ParquetFileReader;
 import org.apache.parquet.hadoop.metadata.ParquetMetadata;
-import org.apache.parquet.hadoop.util.HadoopInputFile;
 import org.apache.parquet.io.ColumnIOFactory;
+import org.apache.parquet.io.LocalInputFile;
 import org.apache.parquet.io.MessageColumnIO;
 import org.apache.parquet.io.RecordReader;
 import org.apache.parquet.schema.MessageType;
@@ -79,6 +77,7 @@
 import java.io.InputStream;
 import java.nio.charset.Charset;
 import java.nio.file.Files;
+import java.nio.file.Path;
 import java.nio.file.StandardCopyOption;
 import java.time.Duration;
 import java.util.ArrayList;
@@ -413,7 +412,7 @@ private List<Map<String, Object>> createParquetRecordsList(final InputStream
         final File tempFile = File.createTempFile(FILE_NAME, FILE_SUFFIX);
         Files.copy(inputStream, tempFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
         List<Map<String, Object>> actualRecordList = new ArrayList<>();
-        try (ParquetFileReader parquetFileReader = new ParquetFileReader(HadoopInputFile.fromPath(new Path(tempFile.toURI()), new Configuration()), ParquetReadOptions.builder().build())) {
+        try (final ParquetFileReader parquetFileReader = new ParquetFileReader(new LocalInputFile(Path.of(tempFile.toURI())), ParquetReadOptions.builder().build())) {
             final ParquetMetadata footer = parquetFileReader.getFooter();
             final MessageType schema = createdParquetSchema(footer);
             PageReadStore pages;
diff --git a/data-prepper-plugins/s3-sink/src/test/java/org/opensearch/dataprepper/plugins/codec/parquet/ParquetOutputCodecTest.java b/data-prepper-plugins/s3-sink/src/test/java/org/opensearch/dataprepper/plugins/codec/parquet/ParquetOutputCodecTest.java
index b441a7a6e3..d6b4160888 100644
--- a/data-prepper-plugins/s3-sink/src/test/java/org/opensearch/dataprepper/plugins/codec/parquet/ParquetOutputCodecTest.java
+++ b/data-prepper-plugins/s3-sink/src/test/java/org/opensearch/dataprepper/plugins/codec/parquet/ParquetOutputCodecTest.java
@@ -6,8 +6,6 @@
 
 import org.apache.avro.Schema;
 import org.apache.avro.SchemaBuilder;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
 import org.apache.parquet.ParquetReadOptions;
 import org.apache.parquet.column.page.PageReadStore;
 import org.apache.parquet.example.data.Group;
@@ -15,8 +13,8 @@
 import org.apache.parquet.example.data.simple.convert.GroupRecordConverter;
 import org.apache.parquet.hadoop.ParquetFileReader;
 import org.apache.parquet.hadoop.metadata.ParquetMetadata;
-import org.apache.parquet.hadoop.util.HadoopInputFile;
 import org.apache.parquet.io.ColumnIOFactory;
+import org.apache.parquet.io.LocalInputFile;
 import org.apache.parquet.io.MessageColumnIO;
 import org.apache.parquet.io.RecordReader;
 import org.apache.parquet.schema.MessageType;
@@ -46,6 +44,7 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.file.Files;
+import java.nio.file.Path;
 import java.nio.file.StandardCopyOption;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -556,7 +555,7 @@ private List<Map<String, Object>> createParquetRecordsList(final InputStream inp
         final File tempFile = new File(tempDirectory, FILE_NAME);
         Files.copy(inputStream, tempFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
         List<Map<String, Object>> actualRecordList = new ArrayList<>();
-        try (ParquetFileReader parquetFileReader = new ParquetFileReader(HadoopInputFile.fromPath(new Path(tempFile.toURI()), new Configuration()), ParquetReadOptions.builder().build())) {
+        try (final ParquetFileReader parquetFileReader = new ParquetFileReader(new LocalInputFile(Path.of(tempFile.toURI())), ParquetReadOptions.builder().build())) {
            final ParquetMetadata footer = parquetFileReader.getFooter();
             final MessageType schema = createdParquetSchema(footer);
             PageReadStore pages;
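Taken together, the test changes in this patch replace HadoopInputFile with LocalInputFile when opening files on disk. For reference, here is a minimal sketch of that pattern outside the tests; the class and method names (`LocalFooterSketch`, `readSchema`) are invented for the example, while `LocalInputFile` and the `ParquetFileReader` constructor are the same parquet-hadoop 1.14.0 APIs the tests above switch to.

```java
import org.apache.parquet.ParquetReadOptions;
import org.apache.parquet.hadoop.ParquetFileReader;
import org.apache.parquet.hadoop.metadata.ParquetMetadata;
import org.apache.parquet.io.LocalInputFile;
import org.apache.parquet.schema.MessageType;

import java.io.File;
import java.io.IOException;
import java.nio.file.Path;

class LocalFooterSketch {
    static MessageType readSchema(final File parquetFile) throws IOException {
        // LocalInputFile wraps a java.nio.file.Path directly, standing in for
        // HadoopInputFile.fromPath(new org.apache.hadoop.fs.Path(...), new Configuration()).
        try (ParquetFileReader reader = new ParquetFileReader(
                new LocalInputFile(Path.of(parquetFile.toURI())),
                ParquetReadOptions.builder().build())) {
            final ParquetMetadata footer = reader.getFooter();
            return footer.getFileMetaData().getSchema();
        }
    }
}
```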
From 87292fcb19eefb9cd830c76379ad72b5554d162d Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 1 Jul 2024 14:42:31 -0700
Subject: [PATCH 2/7] Bump com.apptasticsoftware:rssreader in
 /data-prepper-plugins/rss-source (#4672)

Bumps [com.apptasticsoftware:rssreader](https://github.com/w3stling/rssreader) from 3.6.0 to 3.7.0.
- [Release notes](https://github.com/w3stling/rssreader/releases)
- [Commits](https://github.com/w3stling/rssreader/compare/v3.6.0...v3.7.0)

---
updated-dependencies:
- dependency-name: com.apptasticsoftware:rssreader
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 data-prepper-plugins/rss-source/build.gradle | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/data-prepper-plugins/rss-source/build.gradle b/data-prepper-plugins/rss-source/build.gradle
index 68c0ff9e57..686e40367b 100644
--- a/data-prepper-plugins/rss-source/build.gradle
+++ b/data-prepper-plugins/rss-source/build.gradle
@@ -13,7 +13,7 @@ dependencies {
     implementation 'joda-time:joda-time:2.12.7'
     implementation 'com.fasterxml.jackson.core:jackson-core'
     implementation 'com.fasterxml.jackson.core:jackson-databind'
-    implementation 'com.apptasticsoftware:rssreader:3.6.0'
+    implementation 'com.apptasticsoftware:rssreader:3.7.0'
     testImplementation libs.commons.lang3
     testImplementation project(':data-prepper-test-common')
     testImplementation 'org.mock-server:mockserver-junit-jupiter-no-dependencies:5.15.0'

From 6db5b471e3c07dc08f921b69e41a585b72825691 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 1 Jul 2024 14:43:01 -0700
Subject: [PATCH 3/7] Bump org.wiremock:wiremock in
 /data-prepper-plugins/s3-source (#4683)

Bumps [org.wiremock:wiremock](https://github.com/wiremock/wiremock) from 3.4.2 to 3.8.0.
- [Release notes](https://github.com/wiremock/wiremock/releases)
- [Commits](https://github.com/wiremock/wiremock/compare/3.4.2...3.8.0)

---
updated-dependencies:
- dependency-name: org.wiremock:wiremock
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 data-prepper-plugins/s3-source/build.gradle | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/data-prepper-plugins/s3-source/build.gradle b/data-prepper-plugins/s3-source/build.gradle
index 1187fa7ec0..2a09ce3d90 100644
--- a/data-prepper-plugins/s3-source/build.gradle
+++ b/data-prepper-plugins/s3-source/build.gradle
@@ -31,7 +31,7 @@ dependencies {
     implementation 'dev.failsafe:failsafe:3.3.2'
     implementation 'org.apache.httpcomponents:httpcore:4.4.16'
     testImplementation libs.commons.lang3
-    testImplementation 'org.wiremock:wiremock:3.4.2'
+    testImplementation 'org.wiremock:wiremock:3.8.0'
     testImplementation 'org.eclipse.jetty:jetty-bom:11.0.20'
     testImplementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml'
     testImplementation testLibs.junit.vintage

From d12b2175c98e3a4232f07661b2535a35282f3226 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 1 Jul 2024 14:43:47 -0700
Subject: [PATCH 4/7] Bump ws from 7.5.9 to 7.5.10 in
 /release/staging-resources-cdk (#4639)

Bumps [ws](https://github.com/websockets/ws) from 7.5.9 to 7.5.10.
- [Release notes](https://github.com/websockets/ws/releases)
- [Commits](https://github.com/websockets/ws/compare/7.5.9...7.5.10)

---
updated-dependencies:
- dependency-name: ws
  dependency-type: indirect
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 release/staging-resources-cdk/package-lock.json | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/release/staging-resources-cdk/package-lock.json b/release/staging-resources-cdk/package-lock.json
index 7ac1eaed21..32da99d8c9 100644
--- a/release/staging-resources-cdk/package-lock.json
+++ b/release/staging-resources-cdk/package-lock.json
@@ -7720,9 +7720,9 @@
       }
     },
     "node_modules/ws": {
-      "version": "7.5.9",
-      "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz",
-      "integrity": "sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==",
+      "version": "7.5.10",
+      "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.10.tgz",
+      "integrity": "sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==",
       "dev": true,
       "engines": {
         "node": ">=8.3.0"
@@ -13755,9 +13755,9 @@
       }
     },
     "ws": {
-      "version": "7.5.9",
-      "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz",
-      "integrity": "sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==",
+      "version": "7.5.10",
+      "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.10.tgz",
+      "integrity": "sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==",
       "dev": true,
       "requires": {}
     },

From a8df481dfba3880d0b3121757fa6bb64c79afb70 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 1 Jul 2024 14:44:18 -0700
Subject: [PATCH 5/7] Bump braces from 3.0.2 to 3.0.3 in
 /testing/aws-testing-cdk (#4638)

Bumps [braces](https://github.com/micromatch/braces) from 3.0.2 to 3.0.3.
- [Changelog](https://github.com/micromatch/braces/blob/master/CHANGELOG.md)
- [Commits](https://github.com/micromatch/braces/compare/3.0.2...3.0.3)

---
updated-dependencies:
- dependency-name: braces
  dependency-type: indirect
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 testing/aws-testing-cdk/package-lock.json | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/testing/aws-testing-cdk/package-lock.json b/testing/aws-testing-cdk/package-lock.json
index fbb7310d4f..c7ae43fe77 100644
--- a/testing/aws-testing-cdk/package-lock.json
+++ b/testing/aws-testing-cdk/package-lock.json
@@ -2310,12 +2310,12 @@
       }
     },
     "node_modules/braces": {
-      "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
-      "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
+      "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
       "dev": true,
       "dependencies": {
-        "fill-range": "^7.0.1"
+        "fill-range": "^7.1.1"
       },
       "engines": {
         "node": ">=8"
@@ -3102,9 +3102,9 @@
       }
     },
     "node_modules/fill-range": {
-      "version": "7.0.1",
-      "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
-      "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
+      "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
       "dev": true,
       "dependencies": {
         "to-regex-range": "^5.0.1"

From 60230ca1400e49844015c86234de57d2155d2aee Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 1 Jul 2024 14:45:24 -0700
Subject: [PATCH 6/7] Bump software.amazon.awssdk:auth in /performance-test
 (#4685)

Bumps software.amazon.awssdk:auth from 2.25.21 to 2.26.12.

---
updated-dependencies:
- dependency-name: software.amazon.awssdk:auth
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 performance-test/build.gradle | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/performance-test/build.gradle b/performance-test/build.gradle
index 8c4a9693d2..6dd99cb08d 100644
--- a/performance-test/build.gradle
+++ b/performance-test/build.gradle
@@ -15,7 +15,7 @@ configurations.all {
 group 'org.opensearch.dataprepper.test.performance'
 
 dependencies {
-    gatlingImplementation 'software.amazon.awssdk:auth:2.25.21'
+    gatlingImplementation 'software.amazon.awssdk:auth:2.26.12'
     implementation 'com.fasterxml.jackson.core:jackson-core'
 
     testRuntimeOnly testLibs.junit.engine

From af6bce4fc3fd993ce368a22e24da2a75f7df4d8a Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 1 Jul 2024 14:46:28 -0700
Subject: [PATCH 7/7] Bump org.apache.maven:maven-artifact in
 /data-prepper-plugins/opensearch (#4692)

Bumps [org.apache.maven:maven-artifact](https://github.com/apache/maven) from 3.9.6 to 3.9.8.
- [Release notes](https://github.com/apache/maven/releases)
- [Commits](https://github.com/apache/maven/compare/maven-3.9.6...maven-3.9.8)

---
updated-dependencies:
- dependency-name: org.apache.maven:maven-artifact
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 data-prepper-plugins/opensearch/build.gradle | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/data-prepper-plugins/opensearch/build.gradle b/data-prepper-plugins/opensearch/build.gradle
index 1f96c1f6ea..b87e533afe 100644
--- a/data-prepper-plugins/opensearch/build.gradle
+++ b/data-prepper-plugins/opensearch/build.gradle
@@ -36,7 +36,7 @@ dependencies {
     implementation 'software.amazon.awssdk:apache-client'
     implementation 'software.amazon.awssdk:netty-nio-client'
     implementation 'co.elastic.clients:elasticsearch-java:7.17.0'
-    implementation('org.apache.maven:maven-artifact:3.9.6') {
+    implementation('org.apache.maven:maven-artifact:3.9.8') {
         exclude group: 'org.codehaus.plexus'
     }
     testImplementation testLibs.junit.vintage