feat(search): update to support OpenSearch 2.x
david-leifker committed Sep 19, 2023
1 parent e75900b commit 5f5147f
Showing 182 changed files with 2,913 additions and 1,584 deletions.
19 changes: 8 additions & 11 deletions build.gradle
@@ -8,7 +8,7 @@ buildscript {
ext.openTelemetryVersion = '1.18.0'
ext.neo4jVersion = '4.4.9'
ext.testContainersVersion = '1.17.4'
-ext.elasticsearchVersion = '7.10.2'
+ext.elasticsearchVersion = '2.9.0' // ES 7.10, Opensearch 1.x, 2.x
ext.jacksonVersion = '2.15.2'
ext.jettyVersion = '9.4.46.v20220331'
ext.playVersion = '2.8.18'
@@ -90,8 +90,8 @@ project.ext.externalDependency = [
'ebean': 'io.ebean:ebean:' + ebeanVersion,
'ebeanAgent': 'io.ebean:ebean-agent:' + ebeanVersion,
'ebeanDdl': 'io.ebean:ebean-ddl-generator:' + ebeanVersion,
-'elasticSearchRest': 'org.elasticsearch.client:elasticsearch-rest-high-level-client:' + elasticsearchVersion,
-'elasticSearchTransport': 'org.elasticsearch.client:transport:' + elasticsearchVersion,
+'elasticSearchRest': 'org.opensearch.client:opensearch-rest-high-level-client:' + elasticsearchVersion,
+'elasticSearchJava': 'org.opensearch.client:opensearch-java:2.6.0',
'findbugsAnnotations': 'com.google.code.findbugs:annotations:3.0.1',
'graphqlJava': 'com.graphql-java:graphql-java:19.5',
'graphqlJavaScalars': 'com.graphql-java:graphql-java-extended-scalars:19.1',
@@ -202,13 +202,15 @@ project.ext.externalDependency = [
'springActuator': "org.springframework.boot:spring-boot-starter-actuator:$springBootVersion",
'swaggerAnnotations': 'io.swagger.core.v3:swagger-annotations:2.1.12',
'swaggerCli': 'io.swagger.codegen.v3:swagger-codegen-cli:3.0.41',
-'testng': 'org.testng:testng:7.3.0',
+'testngJava8': 'org.testng:testng:7.5.1',
+'testng': 'org.testng:testng:7.8.0',
'testContainers': 'org.testcontainers:testcontainers:' + testContainersVersion,
'testContainersJunit': 'org.testcontainers:junit-jupiter:' + testContainersVersion,
'testContainersPostgresql':'org.testcontainers:postgresql:' + testContainersVersion,
'testContainersElasticsearch': 'org.testcontainers:elasticsearch:' + testContainersVersion,
'testContainersCassandra': 'org.testcontainers:cassandra:' + testContainersVersion,
'testContainersKafka': 'org.testcontainers:kafka:' + testContainersVersion,
+'testContainersOpenSearch': 'org.opensearch:opensearch-testcontainers:2.0.0',
'typesafeConfig':'com.typesafe:config:1.4.1',
'wiremock':'com.github.tomakehurst:wiremock:2.10.0',
'zookeeper': 'org.apache.zookeeper:zookeeper:3.4.14',
@@ -257,7 +259,6 @@ subprojects {

plugins.withType(JavaPlugin) {
dependencies {
-testImplementation externalDependency.testng
constraints {
implementation('io.netty:netty-all:4.1.86.Final')
implementation('org.apache.commons:commons-compress:1.21')
@@ -268,12 +269,6 @@ subprojects {
}
}

-tasks.withType(Test) {
-if (!name.startsWith('integ')) {
-useTestNG()
-}
-}
-
checkstyle {
configDirectory = file("${project.rootDir}/gradle/checkstyle")
sourceSets = [ getProject().sourceSets.main, getProject().sourceSets.test ]
@@ -292,6 +287,8 @@ subprojects {
javaLauncher = javaToolchains.launcherFor {
languageVersion = JavaLanguageVersion.of(11)
}
+// https://docs.gradle.org/current/userguide/performance.html
+maxParallelForks = Runtime.runtime.availableProcessors().intdiv(2) ?: 1
}

afterEvaluate {
1 change: 1 addition & 0 deletions datahub-graphql-core/build.gradle
@@ -24,6 +24,7 @@ dependencies {
annotationProcessor externalDependency.lombok

testImplementation externalDependency.mockito
+testImplementation externalDependency.testng
}

graphqlCodegen {
@@ -20,25 +20,25 @@
import javax.annotation.Nonnull;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
-import org.elasticsearch.action.search.SearchRequest;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.client.RequestOptions;
-import org.elasticsearch.client.RestHighLevelClient;
-import org.elasticsearch.index.query.BoolQueryBuilder;
-import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.search.aggregations.AggregationBuilder;
-import org.elasticsearch.search.aggregations.AggregationBuilders;
-import org.elasticsearch.search.aggregations.Aggregations;
-import org.elasticsearch.search.aggregations.BucketOrder;
-import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
-import org.elasticsearch.search.aggregations.bucket.filter.Filter;
-import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
-import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
-import org.elasticsearch.search.aggregations.metrics.Cardinality;
-import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.opensearch.action.search.SearchRequest;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.client.RequestOptions;
+import org.opensearch.client.RestHighLevelClient;
+import org.opensearch.index.query.BoolQueryBuilder;
+import org.opensearch.index.query.QueryBuilder;
+import org.opensearch.index.query.QueryBuilders;
+import org.opensearch.search.aggregations.AggregationBuilder;
+import org.opensearch.search.aggregations.AggregationBuilders;
+import org.opensearch.search.aggregations.Aggregations;
+import org.opensearch.search.aggregations.BucketOrder;
+import org.opensearch.search.aggregations.bucket.MultiBucketsAggregation;
+import org.opensearch.search.aggregations.bucket.filter.Filter;
+import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval;
+import org.opensearch.search.aggregations.bucket.histogram.Histogram;
+import org.opensearch.search.aggregations.bucket.terms.Terms;
+import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
+import org.opensearch.search.aggregations.metrics.Cardinality;
+import org.opensearch.search.builder.SearchSourceBuilder;


@Slf4j
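The package rename above is the bulk of the migration: the OpenSearch high-level REST client mirrors the Elasticsearch 7.10 API, so query-building code keeps the same shape and most call sites in this commit only touch import statements. A minimal sketch of a search-with-aggregation call written against the org.opensearch packages (the class name and index name below are illustrative, not taken from this commit):

import java.io.IOException;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.RestHighLevelClient;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.search.aggregations.AggregationBuilders;
import org.opensearch.search.builder.SearchSourceBuilder;

// Hypothetical example class, for illustration only.
public class UsageQueryExample {

  // Counts documents per event type in a hypothetical "datahub_usage_event" index.
  static SearchResponse countByEventType(RestHighLevelClient client) throws IOException {
    SearchSourceBuilder source = new SearchSourceBuilder()
        .size(0)  // aggregation-only query, no hits needed
        .query(QueryBuilders.matchAllQuery())
        .aggregation(AggregationBuilders.terms("by_type").field("type"));
    SearchRequest request = new SearchRequest("datahub_usage_event").source(source);
    // Same call signature as the Elasticsearch 7.10 high-level client.
    return client.search(request, RequestOptions.DEFAULT);
  }
}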
@@ -15,7 +15,6 @@
import com.linkedin.metadata.query.filter.CriterionArray;
import com.linkedin.metadata.query.filter.Filter;
import graphql.schema.DataFetchingEnvironment;
-import junit.framework.TestCase;
import org.testng.annotations.Test;
import org.mockito.Mockito;

@@ -24,9 +23,10 @@
import java.util.stream.Collectors;

import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+import static org.testng.AssertJUnit.assertEquals;


-public class ResolverUtilsTest extends TestCase {
+public class ResolverUtilsTest {

@Test
public void testCriterionFromFilter() throws Exception {
@@ -10,15 +10,15 @@
import com.linkedin.metadata.Constants;
import graphql.schema.DataFetchingEnvironment;
import java.util.Collections;
-import junit.framework.TestCase;
import org.mockito.Mockito;
import org.testng.annotations.Test;

import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;


-public class ListAccessTokensResolverTest extends TestCase {
+public class ListAccessTokensResolverTest {

-// @Test
+@Test
public void testGetSuccess() throws Exception {
final DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
final QueryContext mockAllowContext = TestUtils.getMockAllowContext();
@@ -36,13 +36,13 @@ public void testGetSuccess() throws Exception {
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);

final EntityClient mockClient = Mockito.mock(EntityClient.class);
-Mockito.when(mockClient.filter(
+Mockito.when(Mockito.eq(mockClient.filter(
Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME),
Mockito.eq(buildFilter(filters, Collections.emptyList())),
Mockito.notNull(),
Mockito.eq(input.getStart()),
Mockito.eq(input.getCount()),
-Mockito.eq(getAuthentication(mockEnv))))
+Mockito.eq(getAuthentication(mockEnv)))))
.thenReturn(null);

final ListAccessTokensResolver resolver = new ListAccessTokensResolver(mockClient);
@@ -14,11 +14,12 @@
import com.linkedin.r2.RemoteInvocationException;
import graphql.schema.DataFetchingEnvironment;
import java.util.HashSet;
+
import org.mockito.Mockito;
import org.testng.annotations.Test;

-import static org.testng.Assert.*;
import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
+import static org.testng.Assert.assertThrows;

public class GetIngestionSourceResolverTest {

@@ -6,16 +6,16 @@
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.datahub.graphql.resolvers.mutate.util.SiblingsUtils;
import com.linkedin.metadata.entity.EntityService;
-import junit.framework.TestCase;
import org.mockito.Mockito;
import org.testng.annotations.Test;

import java.util.HashSet;
import java.util.Optional;

import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME;
+import static org.testng.AssertJUnit.assertEquals;

-public class SiblingsUtilsTest extends TestCase {
+public class SiblingsUtilsTest {

private static final String TEST_DATASET_URN1 = "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created,PROD)";
private static final String TEST_DATASET_URN2 = "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created2,PROD)";
@@ -1,12 +1,13 @@
package com.linkedin.datahub.graphql.utils;

import com.linkedin.datahub.graphql.util.DateUtil;
-import junit.framework.TestCase;
import org.joda.time.DateTime;
import org.mockito.Mockito;
import org.testng.annotations.Test;

-public class DateUtilTest extends TestCase {
+import static org.testng.AssertJUnit.assertEquals;
+
+public class DateUtilTest {

private DateTime setTimeParts(int dayOfMonth, boolean zeroTime) {
DateTime result = new DateTime()
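The test-class changes above all follow one pattern: drop the JUnit 3 junit.framework.TestCase superclass, keep TestNG's @Test annotation, and statically import org.testng.AssertJUnit for JUnit-style assertions. A minimal sketch of the resulting shape (the class and assertion are illustrative, not copied from this commit):

import org.testng.annotations.Test;

import static org.testng.AssertJUnit.assertEquals;

// Illustrative only: a plain TestNG test class, no TestCase superclass required.
public class ExampleMigratedTest {

  @Test
  public void testAddition() {
    // AssertJUnit keeps the JUnit-style (expected, actual) argument order.
    assertEquals(4, 2 + 2);
  }
}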
@@ -5,7 +5,7 @@
import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
import io.ebean.Database;
import javax.annotation.Nonnull;
-import org.elasticsearch.client.RestHighLevelClient;
+import org.opensearch.client.RestHighLevelClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
@@ -7,9 +7,9 @@
import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
import java.util.function.Function;
import lombok.RequiredArgsConstructor;
-import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
-import org.elasticsearch.client.RequestOptions;
-import org.elasticsearch.client.RestHighLevelClient;
+import org.opensearch.action.admin.indices.delete.DeleteIndexRequest;
+import org.opensearch.client.RequestOptions;
+import org.opensearch.client.RestHighLevelClient;


// Do we need SQL-tech specific migration paths?
@@ -9,7 +9,7 @@
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import org.elasticsearch.client.RestHighLevelClient;
+import org.opensearch.client.RestHighLevelClient;


public class NoCodeCleanupUpgrade implements Upgrade {
@@ -16,8 +16,8 @@

import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
-import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
-import org.elasticsearch.client.RequestOptions;
+import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest;
+import org.opensearch.client.RequestOptions;

import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING;
import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs;
@@ -19,10 +19,10 @@
import com.linkedin.metadata.shared.ElasticSearchIndexed;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
-import org.elasticsearch.ElasticsearchStatusException;
-import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
-import org.elasticsearch.client.RequestOptions;
-import org.elasticsearch.client.indices.ResizeRequest;
+import org.opensearch.OpenSearchStatusException;
+import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest;
+import org.opensearch.client.RequestOptions;
+import org.opensearch.client.indices.ResizeRequest;

import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING;
import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs;
@@ -97,7 +97,7 @@ private boolean blockWrites(String indexName) throws InterruptedException, IOExc
ack = _esComponents.getSearchClient().indices()
.putSettings(request, RequestOptions.DEFAULT).isAcknowledged();
log.info("Updated index {} with new settings. Settings: {}, Acknowledged: {}", indexName, indexSettings, ack);
-} catch (ElasticsearchStatusException | IOException ese) {
+} catch (OpenSearchStatusException | IOException ese) {
// Cover first run case, indices won't exist so settings updates won't work nor will the rest of the preConfigure steps.
// Since no data are in there they are skippable.
// Have to hack around HighLevelClient not sending the actual Java type nor having an easy way to extract it :(
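The catch-clause change above swaps ElasticsearchStatusException for OpenSearchStatusException so the first-run case (the index does not exist yet) is still tolerated. A rough sketch of that pattern with the OpenSearch high-level client (the method, class name, and setting literal are illustrative; the commit uses the INDEX_BLOCKS_WRITE_SETTING constant and its own helpers):

import java.io.IOException;
import org.opensearch.OpenSearchStatusException;
import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.RestHighLevelClient;
import org.opensearch.common.settings.Settings;

// Hypothetical example class, for illustration only.
public class BlockWritesExample {

  // Tries to block writes on an index; tolerates the index not existing yet (first run).
  static boolean blockWrites(RestHighLevelClient client, String indexName) throws IOException {
    UpdateSettingsRequest request = new UpdateSettingsRequest(indexName)
        .settings(Settings.builder().put("index.blocks.write", true).build());
    try {
      return client.indices().putSettings(request, RequestOptions.DEFAULT).isAcknowledged();
    } catch (OpenSearchStatusException e) {
      // On a brand-new deployment the index is missing, so the settings update fails;
      // treat that as a no-op rather than an error, as the upgrade step above does.
      return true;
    }
  }
}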
@@ -9,7 +9,7 @@
import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder;
import com.linkedin.metadata.shared.ElasticSearchIndexed;
import lombok.extern.slf4j.Slf4j;
-import org.elasticsearch.client.RestHighLevelClient;
+import org.opensearch.client.RestHighLevelClient;

import java.util.List;
import java.util.function.Function;
@@ -4,12 +4,12 @@
import com.linkedin.metadata.shared.ElasticSearchIndexed;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.NotImplementedException;
-import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
-import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
-import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
-import org.elasticsearch.client.GetAliasesResponse;
-import org.elasticsearch.client.RequestOptions;
-import org.elasticsearch.client.RestHighLevelClient;
+import org.opensearch.action.admin.indices.alias.get.GetAliasesRequest;
+import org.opensearch.action.admin.indices.settings.get.GetSettingsRequest;
+import org.opensearch.action.admin.indices.settings.get.GetSettingsResponse;
+import org.opensearch.client.GetAliasesResponse;
+import org.opensearch.client.RequestOptions;
+import org.opensearch.client.RestHighLevelClient;

import java.io.IOException;
import java.util.ArrayList;
10 changes: 10 additions & 0 deletions docker/build.gradle
@@ -38,6 +38,16 @@ task quickstart(type: Exec, dependsOn: ':metadata-ingestion:install') {
// environment "ACTIONS_VERSION", 'alpine3.17-slim'
// environment "DATAHUB_ACTIONS_IMAGE", 'nginx'

+// Elastic
+// environment "DATAHUB_SEARCH_IMAGE", 'elasticsearch'
+// environment "DATAHUB_SEARCH_TAG", '7.10.1'
+
+// OpenSearch
+environment "DATAHUB_SEARCH_IMAGE", 'opensearchproject/opensearch'
+environment "DATAHUB_SEARCH_TAG", '2.9.0'
+environment "XPACK_SECURITY_ENABLED", 'plugins.security.disabled=true'
+environment "USE_AWS_ELASTICSEARCH", 'true'
+
def cmd = [
'source ../metadata-ingestion/venv/bin/activate && ',
'datahub docker quickstart',
7 changes: 5 additions & 2 deletions docker/docker-compose-with-cassandra.yml
@@ -96,6 +96,9 @@ services:
context: ../
dockerfile: docker/elasticsearch-setup/Dockerfile
env_file: elasticsearch-setup/env/docker.env
+environment:
+- ELASTICSEARCH_USE_SSL=${ELASTICSEARCH_USE_SSL:-false}
+- USE_AWS_ELASTICSEARCH=${USE_AWS_ELASTICSEARCH:-false}
depends_on:
elasticsearch:
condition: service_healthy
@@ -117,13 +120,13 @@
elasticsearch:
container_name: elasticsearch
hostname: elasticsearch
-image: elasticsearch:7.10.1
+image: ${DATAHUB_SEARCH_IMAGE:-elasticsearch}:${DATAHUB_SEARCH_TAG:-7.10.1}
ports:
- 9200:9200
env_file: elasticsearch/env/docker.env
environment:
- discovery.type=single-node
-- xpack.security.enabled=false
+- ${XPACK_SECURITY_ENABLED:-xpack.security.enabled=false}
healthcheck:
test: curl -sS --fail http://elasticsearch:9200/_cluster/health?wait_for_status=yellow&timeout=0s
start_period: 5s
7 changes: 5 additions & 2 deletions docker/docker-compose-without-neo4j.yml
@@ -81,6 +81,9 @@ services:
context: ../
dockerfile: docker/elasticsearch-setup/Dockerfile
env_file: elasticsearch-setup/env/docker.env
+environment:
+- ELASTICSEARCH_USE_SSL=${ELASTICSEARCH_USE_SSL:-false}
+- USE_AWS_ELASTICSEARCH=${USE_AWS_ELASTICSEARCH:-false}
depends_on:
elasticsearch:
condition: service_healthy
@@ -104,13 +107,13 @@
elasticsearch:
container_name: elasticsearch
hostname: elasticsearch
-image: elasticsearch:7.10.1
+image: ${DATAHUB_SEARCH_IMAGE:-elasticsearch}:${DATAHUB_SEARCH_TAG:-7.10.1}
ports:
- ${DATAHUB_MAPPED_ELASTIC_PORT:-9200}:9200
env_file: elasticsearch/env/docker.env
environment:
- discovery.type=single-node
-- xpack.security.enabled=false
+- ${XPACK_SECURITY_ENABLED:-xpack.security.enabled=false}
deploy:
resources:
limits:
[Diff truncated: the remaining changed files of the 182 listed above are not shown.]
