diff --git a/build.gradle b/build.gradle index 17b21757f0380..228a8a9f5ff0a 100644 --- a/build.gradle +++ b/build.gradle @@ -17,7 +17,7 @@ buildscript { ext.javaClassVersion = { p -> // If Spring 6 is present, hard dependency on jdk17 - if (p.configurations.any { it.getDependencies().any{ + if (p.configurations.any { it.getDependencies().any { (it.getGroup().equals("org.springframework") && it.getVersion().startsWith("6.")) || (it.getGroup().equals("org.springframework.boot") && it.getVersion().startsWith("3.") && !it.getName().equals("spring-boot-starter-test")) }}) { @@ -43,7 +43,7 @@ buildscript { ext.elasticsearchVersion = '2.9.0' // ES 7.10, Opensearch 1.x, 2.x ext.jacksonVersion = '2.15.3' ext.jettyVersion = '11.0.19' - ext.playVersion = '2.8.18' + ext.playVersion = '2.8.21' ext.log4jVersion = '2.19.0' ext.slf4jVersion = '1.7.36' ext.logbackClassic = '1.4.14' @@ -132,7 +132,7 @@ project.ext.externalDependency = [ 'graphqlJavaScalars': 'com.graphql-java:graphql-java-extended-scalars:21.0', 'gson': 'com.google.code.gson:gson:2.8.9', 'guice': 'com.google.inject:guice:7.0.0', - 'guice4': 'com.google.inject:guice:4.2.3', // Used for frontend while still on old Play version + 'guicePlay': 'com.google.inject:guice:5.0.1', // Used for frontend while still on old Play version 'guava': 'com.google.guava:guava:32.1.2-jre', 'h2': 'com.h2database:h2:2.2.224', 'hadoopCommon':'org.apache.hadoop:hadoop-common:2.7.2', diff --git a/datahub-frontend/app/auth/AuthModule.java b/datahub-frontend/app/auth/AuthModule.java index 699dd360fa523..c929d80328a31 100644 --- a/datahub-frontend/app/auth/AuthModule.java +++ b/datahub-frontend/app/auth/AuthModule.java @@ -8,9 +8,11 @@ import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.inject.AbstractModule; import com.google.inject.Provides; import com.google.inject.Singleton; +import 
com.google.inject.name.Named; import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.entity.client.SystemRestliEntityClient; import com.linkedin.metadata.restli.DefaultRestliClientFactory; @@ -20,6 +22,13 @@ import controllers.SsoCallbackController; import java.nio.charset.StandardCharsets; import java.util.Collections; + +import io.datahubproject.metadata.context.ActorContext; +import io.datahubproject.metadata.context.AuthorizerContext; +import io.datahubproject.metadata.context.EntityRegistryContext; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.context.OperationContextConfig; +import io.datahubproject.metadata.context.SearchContext; import lombok.extern.slf4j.Slf4j; import org.apache.commons.codec.digest.DigestUtils; import org.apache.http.impl.client.CloseableHttpClient; @@ -152,6 +161,31 @@ protected Authentication provideSystemAuthentication() { Collections.emptyMap()); } + @Provides + @Singleton + @Named("systemOperationContext") + protected OperationContext provideOperationContext(final Authentication systemAuthentication, + final ConfigurationProvider configurationProvider) { + ActorContext systemActorContext = + ActorContext.builder() + .systemAuth(true) + .authentication(systemAuthentication) + .build(); + OperationContextConfig systemConfig = OperationContextConfig.builder() + .searchAuthorizationConfiguration(configurationProvider.getAuthorization().getSearch()) + .allowSystemAuthentication(true) + .build(); + + return OperationContext.builder() + .operationContextConfig(systemConfig) + .systemActorContext(systemActorContext) + .searchContext(SearchContext.EMPTY) + .entityRegistryContext(EntityRegistryContext.EMPTY) + // Authorizer.EMPTY doesn't actually apply to system auth + .authorizerContext(AuthorizerContext.builder().authorizer(Authorizer.EMPTY).build()) + .build(systemAuthentication); + } + @Provides @Singleton protected ConfigurationProvider provideConfigurationProvider() { 
@@ -163,13 +197,13 @@ protected ConfigurationProvider provideConfigurationProvider() { @Provides @Singleton protected SystemEntityClient provideEntityClient( - final Authentication systemAuthentication, + @Named("systemOperationContext") final OperationContext systemOperationContext, final ConfigurationProvider configurationProvider) { return new SystemRestliEntityClient( + systemOperationContext, buildRestliClient(), new ExponentialBackoff(_configs.getInt(ENTITY_CLIENT_RETRY_INTERVAL)), _configs.getInt(ENTITY_CLIENT_NUM_RETRIES), - systemAuthentication, configurationProvider.getCache().getClient().getEntityClient()); } diff --git a/datahub-frontend/app/config/ConfigurationProvider.java b/datahub-frontend/app/config/ConfigurationProvider.java index 0f2945d5d2393..9f548b104e8fe 100644 --- a/datahub-frontend/app/config/ConfigurationProvider.java +++ b/datahub-frontend/app/config/ConfigurationProvider.java @@ -1,5 +1,6 @@ package config; +import com.datahub.authorization.AuthorizationConfiguration; import com.linkedin.metadata.config.VisualConfiguration; import com.linkedin.metadata.config.cache.CacheConfiguration; import com.linkedin.metadata.config.kafka.KafkaConfiguration; @@ -26,4 +27,7 @@ public class ConfigurationProvider { /** Configuration for the view layer */ private VisualConfiguration visualConfig; + + /** Configuration for authorization */ + private AuthorizationConfiguration authorization; } diff --git a/datahub-frontend/play.gradle b/datahub-frontend/play.gradle index 9bd77e5279a91..b14962e5900cd 100644 --- a/datahub-frontend/play.gradle +++ b/datahub-frontend/play.gradle @@ -76,7 +76,7 @@ dependencies { implementation externalDependency.slf4jApi compileOnly externalDependency.lombok - runtimeOnly externalDependency.guice4 + runtimeOnly externalDependency.guicePlay runtimeOnly (externalDependency.playDocs) { exclude group: 'com.typesafe.akka', module: 'akka-http-core_2.12' } @@ -90,7 +90,7 @@ dependencies { play { platform { - playVersion = '2.8.18' + 
playVersion = '2.8.21' scalaVersion = '2.12' javaVersion = JavaVersion.VERSION_11 } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java index a647d0ae4e3bb..5f555b45d3b09 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java @@ -4,7 +4,6 @@ public class Constants { private Constants() {} - ; public static final String URN_FIELD_NAME = "urn"; public static final String URNS_FIELD_NAME = "urns"; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index fdb91a749b226..e9d94d313b70e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -321,6 +321,7 @@ import com.linkedin.datahub.graphql.types.ownership.OwnershipType; import com.linkedin.datahub.graphql.types.policy.DataHubPolicyType; import com.linkedin.datahub.graphql.types.query.QueryType; +import com.linkedin.datahub.graphql.types.restricted.RestrictedType; import com.linkedin.datahub.graphql.types.role.DataHubRoleType; import com.linkedin.datahub.graphql.types.rolemetadata.RoleType; import com.linkedin.datahub.graphql.types.schemafield.SchemaFieldType; @@ -349,6 +350,7 @@ import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; +import com.linkedin.metadata.service.RestrictedService; import com.linkedin.metadata.service.SettingsService; import com.linkedin.metadata.service.ViewService; import com.linkedin.metadata.timeline.TimelineService; @@ -416,6 +418,7 @@ public class GmsGraphQLEngine { private 
final QueryService queryService; private final DataProductService dataProductService; private final FormService formService; + private final RestrictedService restrictedService; private final FeatureFlags featureFlags; @@ -468,6 +471,7 @@ public class GmsGraphQLEngine { private final EntityTypeType entityTypeType; private final FormType formType; private final IncidentType incidentType; + private final RestrictedType restrictedType; private final int graphQLQueryComplexityLimit; private final int graphQLQueryDepthLimit; @@ -527,6 +531,7 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { this.queryService = args.queryService; this.dataProductService = args.dataProductService; this.formService = args.formService; + this.restrictedService = args.restrictedService; this.ingestionConfiguration = Objects.requireNonNull(args.ingestionConfiguration); this.authenticationConfiguration = Objects.requireNonNull(args.authenticationConfiguration); @@ -576,6 +581,7 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { this.entityTypeType = new EntityTypeType(entityClient); this.formType = new FormType(entityClient); this.incidentType = new IncidentType(entityClient); + this.restrictedType = new RestrictedType(entityClient, restrictedService); this.graphQLQueryComplexityLimit = args.graphQLQueryComplexityLimit; this.graphQLQueryDepthLimit = args.graphQLQueryDepthLimit; @@ -619,7 +625,8 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { dataTypeType, entityTypeType, formType, - incidentType); + incidentType, + restrictedType); this.loadableTypes = new ArrayList<>(entityTypes); // Extend loadable types with types from the plugins // This allows us to offer search and browse capabilities out of the box for those types @@ -709,6 +716,7 @@ public void configureRuntimeWiring(final RuntimeWiring.Builder builder) { configureStructuredPropertyResolvers(builder); configureFormResolvers(builder); configureIncidentResolvers(builder); + 
configureRestrictedResolvers(builder); } private void configureOrganisationRoleResolvers(RuntimeWiring.Builder builder) { @@ -1456,7 +1464,12 @@ private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { .dataFetcher( "relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.datasetType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) .dataFetcher( "platform", new LoadableTypeResolver<>( @@ -1823,7 +1836,10 @@ private void configureDashboardResolvers(final RuntimeWiring.Builder builder) { typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dashboardType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, restrictedService, this.authorizationConfiguration)) .dataFetcher( "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher( @@ -1950,7 +1966,10 @@ private void configureChartResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.chartType)) .dataFetcher( "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, restrictedService, this.authorizationConfiguration)) .dataFetcher( "platform", new LoadableTypeResolver<>( @@ -2072,7 +2091,12 @@ private void configureDataJobResolvers(final RuntimeWiring.Builder builder) { .dataFetcher( "relationships", new EntityRelationshipsResultResolver(graphClient)) 
.dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataJobType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) .dataFetcher( "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher( @@ -2144,7 +2168,10 @@ private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) { typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataFlowType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, restrictedService, this.authorizationConfiguration)) .dataFetcher( "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher( @@ -2188,7 +2215,12 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde "browsePaths", new EntityBrowsePathsResolver(this.mlFeatureTableType)) .dataFetcher( "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) .dataFetcher("exists", new EntityExistsResolver(entityService)) .dataFetcher( "platform", @@ -2271,7 +2303,12 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde .dataFetcher( "relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + 
siblingGraphService, + restrictedService, + this.authorizationConfiguration)) .dataFetcher("exists", new EntityExistsResolver(entityService)) .dataFetcher( "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) @@ -2316,7 +2353,12 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde "browsePaths", new EntityBrowsePathsResolver(this.mlModelGroupType)) .dataFetcher( "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) .dataFetcher( "platform", new LoadableTypeResolver<>( @@ -2339,7 +2381,12 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde typeWiring .dataFetcher( "relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) .dataFetcher( "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService)) @@ -2359,7 +2406,12 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde typeWiring .dataFetcher( "relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) .dataFetcher( "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService)) @@ -2656,7 +2708,10 @@ private void configureDataProcessInstanceResolvers(final 
RuntimeWiring.Builder b typeWiring -> typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, restrictedService, this.authorizationConfiguration)) .dataFetcher( "state", new TimeSeriesAspectResolver( @@ -2759,4 +2814,16 @@ private void configureIncidentResolvers(final RuntimeWiring.Builder builder) { typeWiring.dataFetcher("incidents", new EntityIncidentsResolver(entityClient))); } } + + private void configureRestrictedResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Restricted", + typeWiring -> + typeWiring + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, restrictedService, this.authorizationConfiguration)) + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java index c70e28811a186..df32530129b04 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java @@ -29,6 +29,7 @@ import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; +import com.linkedin.metadata.service.RestrictedService; import com.linkedin.metadata.service.SettingsService; import com.linkedin.metadata.service.ViewService; import com.linkedin.metadata.timeline.TimelineService; @@ -75,6 +76,7 @@ public class GmsGraphQLEngineArgs { FeatureFlags featureFlags; DataProductService dataProductService; FormService formService; + RestrictedService restrictedService; int 
graphQLQueryComplexityLimit; int graphQLQueryDepthLimit; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java index 9f110e713ed57..7dffd90cf2d7c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java @@ -3,6 +3,7 @@ import com.datahub.authentication.Actor; import com.datahub.authentication.Authentication; import com.datahub.plugins.auth.authorization.Authorizer; +import io.datahubproject.metadata.context.OperationContext; /** Provided as input to GraphQL resolvers; used to carry information about GQL request context. */ public interface QueryContext { @@ -25,4 +26,9 @@ default String getActorUrn() { /** Returns the authorizer used to authorize specific actions. */ Authorizer getAuthorizer(); + + /** + * @return Returns the operational context + */ + OperationContext getOperationContext(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java index de389a358d936..c4c353f6eb8db 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java @@ -2,9 +2,9 @@ import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.analytics.service.AnalyticsUtil; import 
com.linkedin.datahub.graphql.generated.AnalyticsChart; import com.linkedin.datahub.graphql.generated.AnalyticsChartGroup; @@ -12,7 +12,6 @@ import com.linkedin.datahub.graphql.generated.BarSegment; import com.linkedin.datahub.graphql.generated.MetadataAnalyticsInput; import com.linkedin.datahub.graphql.generated.NamedBar; -import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; @@ -22,6 +21,7 @@ import com.linkedin.metadata.search.utils.QueryUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -41,7 +41,7 @@ public final class GetMetadataAnalyticsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final Authentication authentication = ResolverUtils.getAuthentication(environment); + final QueryContext context = environment.getContext(); final MetadataAnalyticsInput input = bindArgument(environment.getArgument("input"), MetadataAnalyticsInput.class); @@ -49,7 +49,7 @@ public final List get(DataFetchingEnvironment environment) final AnalyticsChartGroup group = new AnalyticsChartGroup(); group.setGroupId("FilteredMetadataAnalytics"); group.setTitle(""); - group.setCharts(getCharts(input, authentication)); + group.setCharts(getCharts(input, context.getOperationContext())); return ImmutableList.of(group); } catch (Exception e) { log.error("Failed to retrieve metadata analytics!", e); @@ -57,8 +57,8 @@ public final List get(DataFetchingEnvironment environment) } } - private List getCharts( - MetadataAnalyticsInput input, Authentication authentication) throws Exception { + private List getCharts(MetadataAnalyticsInput input, OperationContext opContext) + throws Exception { final List charts = 
new ArrayList<>(); List entities = Collections.emptyList(); @@ -77,8 +77,7 @@ private List getCharts( } SearchResult searchResult = - _entityClient.searchAcrossEntities( - entities, query, filter, 0, 0, null, null, authentication); + _entityClient.searchAcrossEntities(opContext, entities, query, filter, 0, 0, null, null); List aggregationMetadataList = searchResult.getMetadata().getAggregations(); @@ -96,7 +95,7 @@ private List getCharts( Constants.DOMAIN_ENTITY_NAME, ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), AnalyticsUtil::getDomainName, - authentication); + opContext.getSessionAuthentication()); charts.add(BarChart.builder().setTitle("Entities by Domain").setBars(domainChart).build()); } @@ -113,7 +112,7 @@ private List getCharts( Constants.DATA_PLATFORM_ENTITY_NAME, ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), AnalyticsUtil::getPlatformName, - authentication); + opContext.getSessionAuthentication()); charts.add( BarChart.builder().setTitle("Entities by Platform").setBars(platformChart).build()); } @@ -132,7 +131,7 @@ private List getCharts( ImmutableSet.of( Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), AnalyticsUtil::getTermName, - authentication); + opContext.getSessionAuthentication()); charts.add(BarChart.builder().setTitle("Entities by Term").setBars(termChart).build()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java index 4490bbcec03fc..1a935d530505b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; import static com.linkedin.metadata.Constants.*; +import static 
com.linkedin.metadata.authorization.PoliciesConfig.VIEW_ENTITY_PRIVILEGES; import com.datahub.authorization.AuthUtil; import com.datahub.authorization.ConjunctivePrivilegeGroup; @@ -190,6 +191,20 @@ public static boolean canDeleteQuery( return canEditEntityQueries(subjectUrns, context); } + public static boolean canViewEntity(@Nonnull Urn entityUrn, @Nonnull QueryContext context) { + final DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of(new ConjunctivePrivilegeGroup(VIEW_ENTITY_PRIVILEGES))); + + final Authorizer authorizer = context.getAuthorizer(); + final String actor = context.getActorUrn(); + final String entityType = entityUrn.getEntityType(); + final Optional resourceSpec = + Optional.of(new EntitySpec(entityType, entityUrn.toString())); + + return AuthUtil.isAuthorized(authorizer, actor, resourceSpec, orGroup); + } + public static boolean isAuthorized( @Nonnull QueryContext context, @Nonnull Optional resourceSpec, diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java index 5cfa80e394c5f..83789ec488e64 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java @@ -13,7 +13,6 @@ import com.linkedin.datahub.graphql.generated.ListAccessTokenResult; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.SearchResult; @@ -65,14 +64,15 @@ public CompletableFuture get(DataFetchingEnvironment envi .setOrder(SortOrder.DESCENDING); final SearchResult 
searchResult = _entityClient.search( + context + .getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true)), Constants.ACCESS_TOKEN_ENTITY_NAME, "", buildFilter(filters, Collections.emptyList()), sortCriterion, start, - count, - getAuthentication(environment), - new SearchFlags().setFulltext(true)); + count); final List tokens = searchResult.getEntities().stream() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java index 7bcde0ea9bdc1..b40c6a3fd0f78 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java @@ -76,6 +76,7 @@ public CompletableFuture get(DataFetchingEnvironment environmen BrowseResultV2 browseResults = _entityClient.browseV2( + context.getOperationContext().withSearchFlags(flags -> searchFlags), entityNames, pathStr, maybeResolvedView != null @@ -84,9 +85,7 @@ public CompletableFuture get(DataFetchingEnvironment environmen : inputFilter, sanitizedQuery, start, - count, - context.getAuthentication(), - searchFlags); + count); return mapBrowseResults(browseResults); } catch (Exception e) { throw new RuntimeException("Failed to execute browse V2", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java index 58f7715c3e627..b757a8aa2aab9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java @@ -79,6 +79,7 @@ public 
CompletableFuture get(final DataFetchingEnvironment enviro return UrnSearchResultsMapper.map( _entityClient.searchAcrossEntities( + context.getOperationContext(), CONTAINABLE_ENTITY_NAMES, query, new Filter() @@ -90,8 +91,7 @@ public CompletableFuture get(final DataFetchingEnvironment enviro start, count, null, - null, - context.getAuthentication())); + null)); } catch (Exception e) { throw new RuntimeException( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java index 72912087190c0..29729a8799e79 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java @@ -134,10 +134,12 @@ public CompletableFuture get(DataFetchingEnvironment environment) ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); final Filter finalFilter = buildFilterWithUrns(new HashSet<>(assetUrns), baseFilter); - SearchFlags searchFlags = null; + final SearchFlags searchFlags; com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); if (inputFlags != null) { searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + } else { + searchFlags = null; } try { @@ -151,14 +153,15 @@ public CompletableFuture get(DataFetchingEnvironment environment) return UrnSearchResultsMapper.map( _entityClient.searchAcrossEntities( + context + .getOperationContext() + .withSearchFlags(flags -> searchFlags != null ? 
searchFlags : flags), finalEntityNames, sanitizedQuery, finalFilter, start, count, - searchFlags, - null, - ResolverUtils.getAuthentication(environment))); + null)); } catch (Exception e) { log.error( "Failed to execute search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java index be887d845f385..e251e36a3e15f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java @@ -50,7 +50,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw return CompletableFuture.supplyAsync( () -> { - if (!isAuthorizedToUpdateDeprecationForEntity(environment.getContext(), entityUrn)) { + if (!isAuthorizedToUpdateDeprecationForEntity(context, entityUrn)) { throw new AuthorizationException( "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java index 6229e38954163..829b2f903833d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java @@ -86,6 +86,7 @@ public CompletableFuture get(final DataFetchingEnvironment enviro return UrnSearchResultsMapper.map( _entityClient.searchAcrossEntities( + context.getOperationContext(), SEARCHABLE_ENTITY_TYPES.stream() .map(EntityTypeMapper::getName) .collect(Collectors.toList()), @@ -97,8 +98,7 @@ public CompletableFuture get(final DataFetchingEnvironment enviro start, count, null, - null, - context.getAuthentication())); + null)); } catch (Exception e) { throw new RuntimeException( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java index 5453603f4cc9f..fe4a7f23cfaab 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java @@ -13,7 +13,6 @@ import com.linkedin.datahub.graphql.resolvers.mutate.util.DomainUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; @@ -62,6 +61,7 @@ public CompletableFuture get(final DataFetchingEnvironment 
en // First, get all domain Urns. final SearchResult gmsResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), Constants.DOMAIN_ENTITY_NAME, query, filter, @@ -69,9 +69,7 @@ public CompletableFuture get(final DataFetchingEnvironment en .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) .setOrder(SortOrder.DESCENDING), start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); // Now that we have entities we can bind this to a result. final ListDomainsResult result = new ListDomainsResult(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java index 147663059aa82..54fd7ef1fe04d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java @@ -140,7 +140,7 @@ private Map getTermsWithSameParent(Urn parentNode, QueryCon final Filter filter = buildParentNodeFilter(parentNode); final SearchResult searchResult = _entityClient.filter( - GLOSSARY_TERM_ENTITY_NAME, filter, null, 0, 1000, context.getAuthentication()); + context.getOperationContext(), GLOSSARY_TERM_ENTITY_NAME, filter, null, 0, 1000); final List termUrns = searchResult.getEntities().stream() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java index e7990b1a343d8..700a38d50b317 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java @@ -53,12 +53,12 @@ public CompletableFuture get( final Filter filter = buildGlossaryEntitiesFilter(); final SearchResult gmsNodesResult = _entityClient.filter( + context.getOperationContext(), Constants.GLOSSARY_NODE_ENTITY_NAME, filter, null, start, - count, - context.getAuthentication()); + count); final List glossaryNodeUrns = gmsNodesResult.getEntities().stream() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java index 40e4363dcff93..9669d406344e7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java @@ -53,12 +53,12 @@ public CompletableFuture get( final Filter filter = buildGlossaryEntitiesFilter(); final SearchResult gmsTermsResult = _entityClient.filter( + context.getOperationContext(), Constants.GLOSSARY_TERM_ENTITY_NAME, filter, null, start, - count, - context.getAuthentication()); + count); final List glossaryTermUrns = gmsTermsResult.getEntities().stream() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java index 1f8c17ee72884..8abe237898293 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java @@ -41,10 +41,10 @@ public CompletableFuture get(final DataFetchingEnvironment e // First, get all counts 
Map gmsResult = _entityClient.batchGetTotalEntityCount( + context.getOperationContext(), input.getTypes().stream() .map(EntityTypeMapper::getName) - .collect(Collectors.toList()), - context.getAuthentication()); + .collect(Collectors.toList())); // bind to a result. List resultList = diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java index a6ad8698679f0..70be478d65c5c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java @@ -13,7 +13,6 @@ import com.linkedin.datahub.graphql.generated.ListGroupsResult; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.SearchEntity; @@ -58,6 +57,9 @@ public CompletableFuture get(final DataFetchingEnvironment env // First, get all group Urns. final SearchResult gmsResult = _entityClient.search( + context + .getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true)), CORP_GROUP_ENTITY_NAME, query, null, @@ -65,9 +67,7 @@ public CompletableFuture get(final DataFetchingEnvironment env .setField(CORP_GROUP_CREATED_TIME_INDEX_FIELD_NAME) .setOrder(SortOrder.DESCENDING), start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); // Then, get hydrate all groups. 
final Map entities = diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolver.java index 7958503556274..ad36621d20c66 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolver.java @@ -138,7 +138,7 @@ private Health computeIncidentsHealthForAsset( final Filter filter = buildIncidentsEntityFilter(entityUrn, IncidentState.ACTIVE.toString()); final SearchResult searchResult = _entityClient.filter( - Constants.INCIDENT_ENTITY_NAME, filter, null, 0, 1, context.getAuthentication()); + context.getOperationContext(), Constants.INCIDENT_ENTITY_NAME, filter, null, 0, 1); final Integer activeIncidentCount = searchResult.getNumEntities(); if (activeIncidentCount > 0) { // There are active incidents. 
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java index c797044d1b224..c0c3217fd056d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java @@ -62,12 +62,12 @@ public CompletableFuture get(DataFetchingEnvironment envi final SortCriterion sortCriterion = buildIncidentsSortCriterion(); final SearchResult searchResult = _entityClient.filter( + context.getOperationContext(), Constants.INCIDENT_ENTITY_NAME, filter, sortCriterion, start, - count, - context.getAuthentication()); + count); final List incidentUrns = searchResult.getEntities().stream() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java index 01100a24d6b15..4c8a06e2d585a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java @@ -67,6 +67,7 @@ public CompletableFuture get( final SearchResult executionsSearchResult = _entityClient.filter( + context.getOperationContext(), Constants.EXECUTION_REQUEST_ENTITY_NAME, new Filter() .setOr( @@ -78,8 +79,7 @@ public CompletableFuture get( .setField(REQUEST_TIME_MS_FIELD_NAME) .setOrder(SortOrder.DESCENDING), start, - count, - context.getAuthentication()); + count); // 2. 
Batch fetch the related ExecutionRequests final Set relatedExecRequests = diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java index eb054295af09b..b3c7db20f4537 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java @@ -17,7 +17,6 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.SearchEntity; @@ -67,6 +66,9 @@ public CompletableFuture get(final DataFetchingEnvironment en // First, get all secrets final SearchResult gmsResult = _entityClient.search( + context + .getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true)), Constants.SECRETS_ENTITY_NAME, query, null, @@ -74,9 +76,7 @@ public CompletableFuture get(final DataFetchingEnvironment en .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) .setOrder(SortOrder.DESCENDING), start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); // Then, resolve all secrets final Map entities = diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java index 51c9e30aadcce..d2387820ca7ab 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java @@ -14,7 +14,6 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetcher; @@ -63,14 +62,15 @@ public CompletableFuture get( // First, get all ingestion sources Urns. final SearchResult gmsResult = _entityClient.search( + context + .getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true)), Constants.INGESTION_SOURCE_ENTITY_NAME, query, buildFilter(filters, Collections.emptyList()), null, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); // Then, resolve all ingestion sources final Map entities = diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java index 06bad27e27062..325e804327b72 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java @@ -63,12 +63,12 @@ public CompletableFuture get(DataFetchingEnvironment final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); final SearchResult gmsResult = _entityClient.filter( + context.getOperationContext(), Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, filter, sortCriterion, start, - count, - context.getAuthentication()); + count); final List dataProcessInstanceUrns = gmsResult.getEntities().stream() .map(SearchEntity::getEntity) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java index d595b1e513d75..1c6fa352fecb6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java @@ -69,12 +69,12 @@ public CompletableFuture get(DataFetchingEnvironment final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); final SearchResult gmsResult = _entityClient.filter( + context.getOperationContext(), Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, filter, sortCriterion, start, - count, - context.getAuthentication()); + count); final List dataProcessInstanceUrns = gmsResult.getEntities().stream() .map(SearchEntity::getEntity) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java index c63ec819e8f6a..6f56bfed94240 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java @@ -2,18 +2,26 @@ import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import com.datahub.authorization.AuthorizationConfiguration; import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityLineageResult; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.LineageDirection; import com.linkedin.datahub.graphql.generated.LineageInput; import 
com.linkedin.datahub.graphql.generated.LineageRelationship; +import com.linkedin.datahub.graphql.generated.Restricted; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.metadata.Constants; import com.linkedin.metadata.graph.SiblingGraphService; +import com.linkedin.metadata.service.RestrictedService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import java.net.URISyntaxException; import java.util.HashSet; +import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import javax.annotation.Nullable; @@ -29,16 +37,28 @@ public class EntityLineageResultResolver implements DataFetcher> { private final SiblingGraphService _siblingGraphService; + private final RestrictedService _restrictedService; + private final AuthorizationConfiguration _authorizationConfiguration; - public EntityLineageResultResolver(final SiblingGraphService siblingGraphService) { + public EntityLineageResultResolver( + final SiblingGraphService siblingGraphService, + final RestrictedService restrictedService, + final AuthorizationConfiguration authorizationConfiguration) { _siblingGraphService = siblingGraphService; + _restrictedService = restrictedService; + _authorizationConfiguration = authorizationConfiguration; } @Override public CompletableFuture get(DataFetchingEnvironment environment) { - final String urn = ((Entity) environment.getSource()).getUrn(); + final QueryContext context = environment.getContext(); + Urn urn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); final LineageInput input = bindArgument(environment.getArgument("input"), LineageInput.class); + if (urn.getEntityType().equals(Constants.RESTRICTED_ENTITY_NAME)) { + urn = _restrictedService.decryptRestrictedUrn(urn); + } + final LineageDirection lineageDirection = input.getDirection(); @Nullable final Integer start = input.getStart(); // Optional! 
@Nullable final Integer count = input.getCount(); // Optional! @@ -49,12 +69,13 @@ public CompletableFuture get(DataFetchingEnvironment enviro com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); + final Urn finalUrn = urn; return CompletableFuture.supplyAsync( () -> { try { - return mapEntityRelationships( + com.linkedin.metadata.graph.EntityLineageResult entityLineageResult = _siblingGraphService.getLineage( - Urn.createFromString(urn), + finalUrn, resolvedDirection, start != null ? start : 0, count != null ? count : 100, @@ -62,16 +83,31 @@ public CompletableFuture get(DataFetchingEnvironment enviro separateSiblings != null ? input.getSeparateSiblings() : false, new HashSet<>(), startTimeMillis, - endTimeMillis)); - } catch (URISyntaxException e) { - log.error("Failed to fetch lineage for {}", urn); - throw new RuntimeException(String.format("Failed to fetch lineage for {}", urn), e); + endTimeMillis); + + Set restrictedUrns = new HashSet<>(); + entityLineageResult + .getRelationships() + .forEach( + rel -> { + if (_authorizationConfiguration.getSearch().isEnabled() + && !AuthorizationUtils.canViewEntity(rel.getEntity(), context)) { + restrictedUrns.add(rel.getEntity()); + } + }); + + return mapEntityRelationships(entityLineageResult, restrictedUrns); + } catch (Exception e) { + log.error("Failed to fetch lineage for {}", finalUrn); + throw new RuntimeException( + String.format("Failed to fetch lineage for {}", finalUrn), e); } }); } private EntityLineageResult mapEntityRelationships( - final com.linkedin.metadata.graph.EntityLineageResult entityLineageResult) { + final com.linkedin.metadata.graph.EntityLineageResult entityLineageResult, + final Set restrictedUrns) { final EntityLineageResult result = new EntityLineageResult(); result.setStart(entityLineageResult.getStart()); result.setCount(entityLineageResult.getCount()); @@ -79,17 +115,28 @@ private 
EntityLineageResult mapEntityRelationships( result.setFiltered(entityLineageResult.getFiltered()); result.setRelationships( entityLineageResult.getRelationships().stream() - .map(this::mapEntityRelationship) + .map(r -> mapEntityRelationship(r, restrictedUrns)) .collect(Collectors.toList())); return result; } private LineageRelationship mapEntityRelationship( - final com.linkedin.metadata.graph.LineageRelationship lineageRelationship) { + final com.linkedin.metadata.graph.LineageRelationship lineageRelationship, + final Set restrictedUrns) { final LineageRelationship result = new LineageRelationship(); - final Entity partialEntity = UrnToEntityMapper.map(lineageRelationship.getEntity()); - if (partialEntity != null) { - result.setEntity(partialEntity); + if (restrictedUrns.contains(lineageRelationship.getEntity())) { + final Restricted restrictedEntity = new Restricted(); + restrictedEntity.setType(EntityType.RESTRICTED); + String restrictedUrnString = + _restrictedService.encryptRestrictedUrn(lineageRelationship.getEntity()).toString(); + + restrictedEntity.setUrn(restrictedUrnString); + result.setEntity(restrictedEntity); + } else { + final Entity partialEntity = UrnToEntityMapper.map(lineageRelationship.getEntity()); + if (partialEntity != null) { + result.setEntity(partialEntity); + } } result.setType(lineageRelationship.getType()); result.setDegree(lineageRelationship.getDegree()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java index 5dbd282580c87..29447e6e7ef22 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java @@ -212,7 +212,7 @@ public static boolean hasChildDomains( // Limit count to 1 for existence check final 
SearchResult searchResult = entityClient.filter( - DOMAIN_ENTITY_NAME, parentDomainFilter, null, 0, 1, context.getAuthentication()); + context.getOperationContext(), DOMAIN_ENTITY_NAME, parentDomainFilter, null, 0, 1); return (searchResult.getNumEntities() > 0); } @@ -226,7 +226,7 @@ private static Map getDomainsByNameAndParent( final SearchResult searchResult = entityClient.filter( - DOMAIN_ENTITY_NAME, filter, null, 0, 1000, context.getAuthentication()); + context.getOperationContext(), DOMAIN_ENTITY_NAME, filter, null, 0, 1000); final Set domainUrns = searchResult.getEntities().stream() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java index 1c8f43a490173..aec3848f96640 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java @@ -11,7 +11,6 @@ import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.SearchEntity; @@ -60,14 +59,13 @@ public CompletableFuture get(DataFetchingEnvironment e final SearchResult gmsResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), Constants.OWNERSHIP_TYPE_ENTITY_NAME, query, buildFilter(filters, Collections.emptyList()), DEFAULT_SORT_CRITERION, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); final ListOwnershipTypesResult result = new 
ListOwnershipTypesResult(); result.setStart(gmsResult.getFrom()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java index c1e4bb7f83316..1d26aab85f463 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java @@ -61,7 +61,7 @@ public CompletableFuture get(final DataFetchingEnvironment e final Filter filter = ResolverUtils.buildFilter(facetFilters, Collections.emptyList()); return _policyFetcher - .fetchPolicies(start, query, count, filter, context.getAuthentication()) + .fetchPolicies(context.getOperationContext(), start, query, count, filter) .thenApply( policyFetchResult -> { final ListPoliciesResult result = new ListPoliciesResult(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java index 5292adbe3aac3..34ef616c61e41 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java @@ -11,7 +11,6 @@ import com.linkedin.datahub.graphql.types.post.PostMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.SearchEntity; @@ -57,14 +56,13 @@ public CompletableFuture get(final DataFetchingEnvironment envi // First, get all Post Urns. 
final SearchResult gmsResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), POST_ENTITY_NAME, query, null, sortCriterion, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); // Then, get and hydrate all Posts. final Map entities = diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java index fec5bb120eeba..6fcc0fee76303 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java @@ -14,7 +14,6 @@ import com.linkedin.datahub.graphql.generated.ListQueriesResult; import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; @@ -66,14 +65,16 @@ public CompletableFuture get(final DataFetchingEnvironment en // First, get all Query Urns. 
final SearchResult gmsResult = _entityClient.search( + context + .getOperationContext() + .withSearchFlags( + flags -> flags.setFulltext(true).setSkipHighlighting(true)), QUERY_ENTITY_NAME, query, buildFilters(input), sortCriterion, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true).setSkipHighlighting(true)); + count); final ListQueriesResult result = new ListQueriesResult(); result.setStart(gmsResult.getFrom()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java index e65666117b4fa..86d1b8bab669c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java @@ -4,6 +4,7 @@ import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ContentParams; import com.linkedin.datahub.graphql.generated.EntityProfileParams; import com.linkedin.datahub.graphql.generated.FacetFilter; @@ -46,6 +47,7 @@ public class ListRecommendationsResolver @WithSpan @Override public CompletableFuture get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); final ListRecommendationsInput input = bindArgument(environment.getArgument("input"), ListRecommendationsInput.class); @@ -55,7 +57,7 @@ public CompletableFuture get(DataFetchingEnvironment log.debug("Listing recommendations for input {}", input); List modules = _recommendationsService.listRecommendations( - Urn.createFromString(input.getUserUrn()), + context.getOperationContext(), mapRequestContext(input.getRequestContext()), 
input.getLimit()); return ListRecommendationsResult.builder() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java index 61ecf09fc91a5..3bf11b9febc63 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java @@ -38,7 +38,8 @@ public CompletableFuture get(final DataFetchingEnvironment environm () -> { try { return new InviteToken( - _inviteTokenService.getInviteToken(roleUrnStr, true, authentication)); + _inviteTokenService.getInviteToken( + context.getOperationContext(), roleUrnStr, true)); } catch (Exception e) { throw new RuntimeException( String.format("Failed to create invite token for role %s", roleUrnStr), e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java index 066753c4f7559..039a1730e7e67 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java @@ -38,7 +38,8 @@ public CompletableFuture get(final DataFetchingEnvironment environm () -> { try { return new InviteToken( - _inviteTokenService.getInviteToken(roleUrnStr, false, authentication)); + _inviteTokenService.getInviteToken( + context.getOperationContext(), roleUrnStr, false)); } catch (Exception e) { throw new RuntimeException( String.format("Failed to get invite token for role %s", roleUrnStr), e); diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java index a1dd9219f6549..5c0ea6651f67e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java @@ -11,7 +11,6 @@ import com.linkedin.datahub.graphql.types.role.mappers.DataHubRoleMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetcher; @@ -53,13 +52,12 @@ public CompletableFuture get(final DataFetchingEnvironment envi // First, get all role Urns. final SearchResult gmsResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), DATAHUB_ROLE_ENTITY_NAME, query, Collections.emptyMap(), start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); // Then, get and hydrate all users. 
final Map entities = diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java index b54987dc0e9b0..44b998219f01e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java @@ -70,6 +70,7 @@ public CompletableFuture get(DataFetchingEnvironment environme try { return mapAggregateResults( _entityClient.searchAcrossEntities( + context.getOperationContext().withSearchFlags(flags -> searchFlags), maybeResolvedView != null ? SearchUtils.intersectEntityTypes( entityNames, maybeResolvedView.getDefinition().getEntityTypes()) @@ -81,9 +82,7 @@ public CompletableFuture get(DataFetchingEnvironment environme : inputFilter, 0, 0, // 0 entity count because we don't want resolved entities - searchFlags, null, - ResolverUtils.getAuthentication(environment), facets)); } catch (Exception e) { log.error( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java index 1a380781385c3..21007bf228a70 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java @@ -5,13 +5,12 @@ import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; -import com.datahub.authentication.Authentication; import com.linkedin.common.urn.UrnUtils; +import 
com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.GetQuickFiltersInput; import com.linkedin.datahub.graphql.generated.GetQuickFiltersResult; import com.linkedin.datahub.graphql.generated.QuickFilter; -import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; @@ -23,6 +22,7 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.Comparator; import java.util.List; @@ -48,6 +48,7 @@ public class GetQuickFiltersResolver public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); final GetQuickFiltersInput input = bindArgument(environment.getArgument("input"), GetQuickFiltersInput.class); @@ -58,7 +59,7 @@ public CompletableFuture get(final DataFetchingEnvironmen try { final SearchResult searchResult = - getSearchResults(ResolverUtils.getAuthentication(environment), input); + getSearchResults(context.getOperationContext(), input); final AggregationMetadataArray aggregations = searchResult.getMetadata().getAggregations(); @@ -74,13 +75,17 @@ public CompletableFuture get(final DataFetchingEnvironmen }); } - /** Do a star search with view filter applied to get info about all data in this instance. */ + /** + * Do a star search with view filter applied to get info about all data in this instance. Include + * aggregations. 
+ */ private SearchResult getSearchResults( - @Nonnull final Authentication authentication, @Nonnull final GetQuickFiltersInput input) + @Nonnull final OperationContext opContext, @Nonnull final GetQuickFiltersInput input) throws Exception { final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), authentication) + ? resolveView( + _viewService, UrnUtils.getUrn(input.getViewUrn()), opContext.getAuthentication()) : null; final List entityNames = SEARCHABLE_ENTITY_TYPES.stream() @@ -88,6 +93,7 @@ private SearchResult getSearchResults( .collect(Collectors.toList()); return _entityClient.searchAcrossEntities( + opContext.withSearchFlags(flags -> flags.setSkipAggregates(false)), maybeResolvedView != null ? SearchUtils.intersectEntityTypes( entityNames, maybeResolvedView.getDefinition().getEntityTypes()) @@ -99,8 +105,7 @@ private SearchResult getSearchResults( 0, 0, null, - null, - authentication); + null); } /** diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java index 658138ae6e3dc..79101c9b6a48f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java @@ -72,10 +72,12 @@ public CompletableFuture get(DataFetchingEnvironment environment) : null; final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); - SearchFlags searchFlags = null; + final SearchFlags searchFlags; com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); if (inputFlags != null) { searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + } else { + searchFlags = null; } try { @@ 
-90,6 +92,9 @@ public CompletableFuture get(DataFetchingEnvironment environment) return UrnScrollResultsMapper.map( _entityClient.scrollAcrossEntities( + context + .getOperationContext() + .withSearchFlags(flags -> searchFlags != null ? searchFlags : flags), maybeResolvedView != null ? SearchUtils.intersectEntityTypes( entityNames, maybeResolvedView.getDefinition().getEntityTypes()) @@ -101,9 +106,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) : baseFilter, scrollId, keepAlive, - count, - searchFlags, - ResolverUtils.getAuthentication(environment))); + count)); } catch (Exception e) { log.error( "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, searchAfter: {}, count: {}", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java index 0af0a3827b1bb..fa82cad6fffde 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java @@ -93,7 +93,7 @@ public CompletableFuture get(DataFetchingEnvironment scrollId, count); - SearchFlags searchFlags = null; + final SearchFlags searchFlags; final com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); if (inputFlags != null) { @@ -102,9 +102,14 @@ public CompletableFuture get(DataFetchingEnvironment .setSkipCache(inputFlags.getSkipCache()) .setFulltext(inputFlags.getFulltext()) .setMaxAggValues(inputFlags.getMaxAggValues()); + } else { + searchFlags = null; } return UrnScrollAcrossLineageResultsMapper.map( _entityClient.scrollAcrossLineage( + context + .getOperationContext() + .withSearchFlags(flags -> searchFlags != null ? 
searchFlags : flags), urn, resolvedDirection, entityNames, @@ -116,9 +121,7 @@ public CompletableFuture get(DataFetchingEnvironment keepAlive, count, startTimeMillis, - endTimeMillis, - searchFlags, - ResolverUtils.getAuthentication(environment))); + endTimeMillis)); } catch (RemoteInvocationException e) { log.error( "Failed to execute scroll across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java index f8178e3b396cb..6f4bcf937f2fe 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java @@ -77,6 +77,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) return UrnSearchResultsMapper.map( _entityClient.searchAcrossEntities( + context.getOperationContext().withSearchFlags(flags -> searchFlags), maybeResolvedView != null ? 
SearchUtils.intersectEntityTypes( entityNames, maybeResolvedView.getDefinition().getEntityTypes()) @@ -88,9 +89,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) : baseFilter, start, count, - searchFlags, - sortCriterion, - ResolverUtils.getAuthentication(environment))); + sortCriterion)); } catch (Exception e) { log.error( "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java index 4a0eacaf09671..9937dac2447f7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java @@ -6,6 +6,7 @@ import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AndFilterInput; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; @@ -78,6 +79,8 @@ private List getEntityNamesFromInput(List inputTypes) { public CompletableFuture get(DataFetchingEnvironment environment) throws URISyntaxException { log.debug("Entering search across lineage graphql resolver"); + final QueryContext context = environment.getContext(); + final SearchAcrossLineageInput input = bindArgument(environment.getArgument("input"), SearchAcrossLineageInput.class); @@ -125,7 +128,7 @@ public CompletableFuture get(DataFetchingEnvironment final Filter filter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - SearchFlags searchFlags = null; + final SearchFlags searchFlags; 
com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); if (inputFlags != null) { searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); @@ -137,6 +140,7 @@ public CompletableFuture get(DataFetchingEnvironment } LineageSearchResult salResults = _entityClient.searchAcrossLineage( + context.getOperationContext().withSearchFlags(flags -> searchFlags), urn, resolvedDirection, entityNames, @@ -147,9 +151,7 @@ public CompletableFuture get(DataFetchingEnvironment start, count, startTimeMillis, - endTimeMillis, - searchFlags, - getAuthentication(environment)); + endTimeMillis); return UrnSearchAcrossLineageResultsMapper.map(salResults); } catch (RemoteInvocationException e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java index 7428207034f5d..7d1b7d6ef2838 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java @@ -4,6 +4,7 @@ import static com.linkedin.metadata.Constants.*; import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.SearchInput; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; @@ -48,6 +49,7 @@ public class SearchResolver implements DataFetcher get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); final SearchInput input = bindArgument(environment.getArgument("input"), SearchInput.class); final String entityName = EntityTypeMapper.getName(input.getType()); // escape forward slash since it is a reserved character in Elasticsearch @@ -78,14 +80,13 @@ 
public CompletableFuture get(DataFetchingEnvironment environment) return UrnSearchResultsMapper.map( _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> searchFlags), entityName, sanitizedQuery, ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()), null, start, - count, - ResolverUtils.getAuthentication(environment), - searchFlags)); + count)); } catch (Exception e) { log.error( "Failed to execute search: entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java index f345d9ceb21e5..3f4a0367af05a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java @@ -12,7 +12,6 @@ import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; @@ -57,13 +56,14 @@ public CompletableFuture get(final DataFetchingEnvironment envi // First, get all group Urns. final SearchResult gmsResult = _entityClient.search( + context + .getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true)), Constants.TEST_ENTITY_NAME, query, Collections.emptyMap(), start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); // Now that we have entities we can bind this to a result. 
final ListTestsResult result = new ListTestsResult(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java index 215d53299c8ac..ef03a67a55c82 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java @@ -13,7 +13,6 @@ import com.linkedin.datahub.graphql.types.corpuser.mappers.CorpUserMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetcher; @@ -57,13 +56,14 @@ public CompletableFuture get(final DataFetchingEnvironment envi // First, get all policy Urns. final SearchResult gmsResult = _entityClient.search( + context + .getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true)), CORP_USER_ENTITY_NAME, query, Collections.emptyMap(), start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); // Then, get hydrate all users. 
final Map entities = diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java index caa37f8264854..80d33b84b4c76 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java @@ -15,7 +15,6 @@ import com.linkedin.datahub.graphql.generated.ListViewsResult; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; @@ -68,14 +67,13 @@ public CompletableFuture get(final DataFetchingEnvironment envi final SearchResult gmsResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), Constants.DATAHUB_VIEW_ENTITY_NAME, query, buildFilters(), DEFAULT_SORT_CRITERION, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); final ListViewsResult result = new ListViewsResult(); result.setStart(gmsResult.getFrom()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java index 945d2d50bcc3e..fd029f9d6d3b2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java @@ -14,7 +14,6 @@ import com.linkedin.datahub.graphql.generated.ListViewsResult; import 
com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; @@ -71,14 +70,13 @@ public CompletableFuture get(final DataFetchingEnvironment envi final SearchResult gmsResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), Constants.DATAHUB_VIEW_ENTITY_NAME, query, buildFilters(viewType, context.getActorUrn()), DEFAULT_SORT_CRITERION, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); final ListViewsResult result = new ListViewsResult(); result.setStart(gmsResult.getFrom()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java index 77168356682c2..ffd5bffdd43d9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java @@ -39,7 +39,6 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -154,13 +153,12 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "chart", query, facetFilters, start, - count, - context.getAuthentication(), - new 
SearchFlags().setFulltext(true)); + count); return UrnSearchResultsMapper.map(searchResult); } @@ -173,7 +171,7 @@ public AutoCompleteResults autoComplete( @Nonnull QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete("chart", query, filters, limit, context.getAuthentication()); + _entityClient.autoComplete(context.getOperationContext(), "chart", query, filters, limit); return AutoCompleteResultsMapper.map(result); } @@ -190,7 +188,12 @@ public BrowseResults browse( path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - "chart", pathStr, facetFilters, start, count, context.getAuthentication()); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "chart", + pathStr, + facetFilters, + start, + count); return BrowseResultMapper.map(result); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java index faede5cf9bb1b..16f1909af09f6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java @@ -45,6 +45,12 @@ public com.linkedin.metadata.query.SearchFlags apply(@Nonnull final SearchFlags if (searchFlags.getGetSuggestions() != null) { result.setGetSuggestions(searchFlags.getGetSuggestions()); } + if (searchFlags.getIncludeSoftDeleted() != null) { + result.setIncludeSoftDeleted(searchFlags.getIncludeSoftDeleted()); + } + if (searchFlags.getIncludeRestricted() != null) { + result.setIncludeRestricted(searchFlags.getIncludeRestricted()); + } if (searchFlags.getGroupingSpec() != null && searchFlags.getGroupingSpec().getGroupingCriteria() != 
null) { result.setGroupingSpec( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java index 3ca018ea6f5c7..a859cd6c79e80 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java @@ -31,6 +31,7 @@ import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.QueryEntity; +import com.linkedin.datahub.graphql.generated.Restricted; import com.linkedin.datahub.graphql.generated.Role; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; @@ -204,6 +205,11 @@ public Entity apply(Urn input) { ((QueryEntity) partialEntity).setUrn(input.toString()); ((QueryEntity) partialEntity).setType(EntityType.QUERY); } + if (input.getEntityType().equals(RESTRICTED_ENTITY_NAME)) { + partialEntity = new Restricted(); + ((Restricted) partialEntity).setUrn(input.toString()); + ((Restricted) partialEntity).setType(EntityType.RESTRICTED); + } return partialEntity; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java index c6f2303fec792..6408e2d6779fc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java @@ -18,7 +18,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import 
com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; @@ -128,13 +127,12 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), ENTITY_NAME, query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); return UrnSearchResultsMapper.map(searchResult); } @@ -147,7 +145,8 @@ public AutoCompleteResults autoComplete( @Nonnull final QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); + _entityClient.autoComplete( + context.getOperationContext(), ENTITY_NAME, query, filters, limit); return AutoCompleteResultsMapper.map(result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java index 371cf6b280c20..4eb038632c6c6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java @@ -30,7 +30,6 @@ import com.linkedin.identity.CorpGroupEditableInfo; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -116,13 +115,12 @@ public SearchResults search( throws 
Exception { final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "corpGroup", query, Collections.emptyMap(), start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); return UrnSearchResultsMapper.map(searchResult); } @@ -135,7 +133,8 @@ public AutoCompleteResults autoComplete( @Nonnull final QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete("corpGroup", query, filters, limit, context.getAuthentication()); + _entityClient.autoComplete( + context.getOperationContext(), "corpGroup", query, filters, limit); return AutoCompleteResultsMapper.map(result); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java index 5749eef970fce..3dab88fcc300b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java @@ -32,7 +32,6 @@ import com.linkedin.identity.CorpUserEditableInfo; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -116,13 +115,12 @@ public SearchResults search( throws Exception { final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "corpuser", query, Collections.emptyMap(), start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); return UrnSearchResultsMapper.map(searchResult); } @@ -135,7 +133,8 @@ 
public AutoCompleteResults autoComplete( @Nonnull final QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete("corpuser", query, filters, limit, context.getAuthentication()); + _entityClient.autoComplete( + context.getOperationContext(), "corpuser", query, filters, limit); return AutoCompleteResultsMapper.map(result); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java index 9f12fe7c0eccd..4efcb42cf8e3c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java @@ -40,7 +40,6 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -153,13 +152,12 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "dashboard", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); return UrnSearchResultsMapper.map(searchResult); } @@ -172,7 +170,8 @@ public AutoCompleteResults autoComplete( @Nonnull QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete("dashboard", query, filters, limit, context.getAuthentication()); + _entityClient.autoComplete( + context.getOperationContext(), "dashboard", query, 
filters, limit); return AutoCompleteResultsMapper.map(result); } @@ -189,7 +188,12 @@ public BrowseResults browse( path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - "dashboard", pathStr, facetFilters, start, count, context.getAuthentication()); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "dashboard", + pathStr, + facetFilters, + start, + count); return BrowseResultMapper.map(result); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java index fdc60635679f7..fdb0bd603b27a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java @@ -40,7 +40,6 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -147,13 +146,12 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "dataFlow", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); return UrnSearchResultsMapper.map(searchResult); } @@ -166,7 +164,8 @@ public AutoCompleteResults autoComplete( @Nonnull final QueryContext context) throws Exception { final AutoCompleteResult result = - 
_entityClient.autoComplete("dataFlow", query, filters, limit, context.getAuthentication()); + _entityClient.autoComplete( + context.getOperationContext(), "dataFlow", query, filters, limit); return AutoCompleteResultsMapper.map(result); } @@ -183,7 +182,12 @@ public BrowseResults browse( path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - "dataFlow", pathStr, facetFilters, start, count, context.getAuthentication()); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "dataFlow", + pathStr, + facetFilters, + start, + count); return BrowseResultMapper.map(result); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java index 7593aede32a30..5127bee3f8a8c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java @@ -40,7 +40,6 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -149,13 +148,12 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "dataJob", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); return UrnSearchResultsMapper.map(searchResult); } @@ -168,7 +166,7 @@ public 
AutoCompleteResults autoComplete( @Nonnull final QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete("dataJob", query, filters, limit, context.getAuthentication()); + _entityClient.autoComplete(context.getOperationContext(), "dataJob", query, filters, limit); return AutoCompleteResultsMapper.map(result); } @@ -185,7 +183,12 @@ public BrowseResults browse( path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - "dataJob", pathStr, facetFilters, start, count, context.getAuthentication()); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "dataJob", + pathStr, + facetFilters, + start, + count); return BrowseResultMapper.map(result); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java index 6519a493f3991..c45cec34e5e79 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java @@ -121,7 +121,11 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete( - DATA_PLATFORM_INSTANCE_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + context.getOperationContext(), + DATA_PLATFORM_INSTANCE_ENTITY_NAME, + query, + filters, + limit); return AutoCompleteResultsMapper.map(result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java index 
253a84ca34945..a63c4bbbbf1d2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java @@ -113,7 +113,7 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete( - DATA_PRODUCT_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + context.getOperationContext(), DATA_PRODUCT_ENTITY_NAME, query, filters, limit); return AutoCompleteResultsMapper.map(result); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java index fd31e1d394a92..0ae41eef6b1b1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java @@ -40,7 +40,6 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -168,13 +167,12 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), ENTITY_NAME, query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); return UrnSearchResultsMapper.map(searchResult); } @@ -187,7 +185,8 @@ public AutoCompleteResults autoComplete( @Nonnull final QueryContext context) throws 
Exception { final AutoCompleteResult result = - _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); + _entityClient.autoComplete( + context.getOperationContext(), ENTITY_NAME, query, filters, limit); return AutoCompleteResultsMapper.map(result); } @@ -204,7 +203,12 @@ public BrowseResults browse( path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - "dataset", pathStr, facetFilters, start, count, context.getAuthentication()); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "dataset", + pathStr, + facetFilters, + start, + count); return BrowseResultMapper.map(result); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java index cfacf0031c318..d18633c763eed 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java @@ -115,7 +115,7 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete( - Constants.DOMAIN_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + context.getOperationContext(), Constants.DOMAIN_ENTITY_NAME, query, filters, limit); return AutoCompleteResultsMapper.map(result); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java index 23e793782e8dc..3ecd01e99056b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java @@ -43,6 +43,7 @@ public class EntityTypeMapper { .put(EntityType.SCHEMA_FIELD, "schemaField") .put(EntityType.STRUCTURED_PROPERTY, Constants.STRUCTURED_PROPERTY_ENTITY_NAME) .put(EntityType.ASSERTION, Constants.ASSERTION_ENTITY_NAME) + .put(EntityType.RESTRICTED, Constants.RESTRICTED_ENTITY_NAME) .build(); private static final Map ENTITY_NAME_TO_TYPE = diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java index 9114ff2670906..b6b813cddf99b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java @@ -28,7 +28,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; @@ -126,13 +125,12 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "glossaryTerm", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); return UrnSearchResultsMapper.map(searchResult); } @@ -146,7 +144,7 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete( - "glossaryTerm", query, filters, limit, context.getAuthentication()); + context.getOperationContext(), 
"glossaryTerm", query, filters, limit); return AutoCompleteResultsMapper.map(result); } @@ -163,7 +161,12 @@ public BrowseResults browse( path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - "glossaryTerm", pathStr, facetFilters, start, count, context.getAuthentication()); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "glossaryTerm", + pathStr, + facetFilters, + start, + count); return BrowseResultMapper.map(result); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java index da3ddd1115437..b55f574045393 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java @@ -28,7 +28,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; @@ -111,13 +110,12 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "mlFeatureTable", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); return UrnSearchResultsMapper.map(searchResult); } @@ -131,7 +129,7 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete( - 
"mlFeatureTable", query, filters, limit, context.getAuthentication()); + context.getOperationContext(), "mlFeatureTable", query, filters, limit); return AutoCompleteResultsMapper.map(result); } @@ -148,7 +146,12 @@ public BrowseResults browse( path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - "mlFeatureTable", pathStr, facetFilters, start, count, context.getAuthentication()); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "mlFeatureTable", + pathStr, + facetFilters, + start, + count); return BrowseResultMapper.map(result); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java index 6f94ea44cd476..7046ee0f94eeb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java @@ -20,7 +20,6 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; @@ -101,13 +100,12 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "mlFeature", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); return UrnSearchResultsMapper.map(searchResult); } @@ -120,7 +118,8 @@ public AutoCompleteResults autoComplete( @Nonnull 
final QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete("mlFeature", query, filters, limit, context.getAuthentication()); + _entityClient.autoComplete( + context.getOperationContext(), "mlFeature", query, filters, limit); return AutoCompleteResultsMapper.map(result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java index d505b70effdd4..8865d2acce12d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java @@ -28,7 +28,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; @@ -111,13 +110,12 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "mlModelGroup", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); return UrnSearchResultsMapper.map(searchResult); } @@ -131,7 +129,7 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete( - "mlModelGroup", query, filters, limit, context.getAuthentication()); + context.getOperationContext(), "mlModelGroup", query, filters, limit); return AutoCompleteResultsMapper.map(result); } @@ -148,7 +146,12 @@ public BrowseResults browse( 
path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - "mlModelGroup", pathStr, facetFilters, start, count, context.getAuthentication()); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "mlModelGroup", + pathStr, + facetFilters, + start, + count); return BrowseResultMapper.map(result); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java index 27b791d78e78e..a1c689d9f5c1d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java @@ -28,7 +28,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; @@ -106,13 +105,12 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "mlModel", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); return UrnSearchResultsMapper.map(searchResult); } @@ -125,7 +123,7 @@ public AutoCompleteResults autoComplete( @Nonnull final QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete("mlModel", query, filters, limit, context.getAuthentication()); + _entityClient.autoComplete(context.getOperationContext(), "mlModel", query, 
filters, limit); return AutoCompleteResultsMapper.map(result); } @@ -142,7 +140,12 @@ public BrowseResults browse( path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - "mlModel", pathStr, facetFilters, start, count, context.getAuthentication()); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "mlModel", + pathStr, + facetFilters, + start, + count); return BrowseResultMapper.map(result); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java index 10cfe181dd292..cccb05e8fa0f5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java @@ -20,7 +20,6 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; @@ -101,13 +100,12 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "mlPrimaryKey", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); return UrnSearchResultsMapper.map(searchResult); } @@ -121,7 +119,7 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete( - "mlPrimaryKey", query, filters, limit, 
context.getAuthentication()); + context.getOperationContext(), "mlPrimaryKey", query, filters, limit); return AutoCompleteResultsMapper.map(result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java index b6990c3816b53..a8e964581dfd5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java @@ -38,7 +38,6 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -95,13 +94,12 @@ public SearchResults search( final Map facetFilters = Collections.emptyMap(); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), NOTEBOOK_ENTITY_NAME, query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); return UrnSearchResultsMapper.map(searchResult); } @@ -115,7 +113,7 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete( - NOTEBOOK_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + context.getOperationContext(), NOTEBOOK_ENTITY_NAME, query, filters, limit); return AutoCompleteResultsMapper.map(result); } @@ -135,7 +133,12 @@ public BrowseResults browse( path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - NOTEBOOK_ENTITY_NAME, pathStr, facetFilters, start, count, context.getAuthentication()); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + NOTEBOOK_ENTITY_NAME, + pathStr, + facetFilters, + start, + count); return BrowseResultMapper.map(result); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedMapper.java new file mode 100644 index 0000000000000..61186fc9f77e5 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedMapper.java @@ -0,0 +1,32 @@ +package com.linkedin.datahub.graphql.types.restricted; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Restricted; +import com.linkedin.entity.EntityResponse; +import com.linkedin.metadata.service.RestrictedService; +import javax.annotation.Nonnull; + +public class RestrictedMapper { + + public static final RestrictedMapper INSTANCE = new RestrictedMapper(); + + public static Restricted map( + @Nonnull final EntityResponse entityResponse, + @Nonnull final RestrictedService restrictedService) { + return INSTANCE.apply(entityResponse, restrictedService); + } + + public Restricted apply( + @Nonnull final EntityResponse entityResponse, + @Nonnull final RestrictedService restrictedService) { + final Restricted result = new Restricted(); + Urn entityUrn = entityResponse.getUrn(); + String restrictedUrnString = restrictedService.encryptRestrictedUrn(entityUrn).toString(); + + result.setUrn(restrictedUrnString); + result.setType(EntityType.RESTRICTED); + + return result; + } +} diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedType.java new file mode 100644 index 0000000000000..a2030bb596d10 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedType.java @@ -0,0 +1,103 @@ +package com.linkedin.datahub.graphql.types.restricted; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.Restricted; +import com.linkedin.datahub.graphql.types.EntityType; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.service.RestrictedService; +import graphql.execution.DataFetcherResult; +import java.util.*; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class RestrictedType implements EntityType { + public static final Set ASPECTS_TO_FETCH = ImmutableSet.of(); + + private final EntityClient _entityClient; + private final RestrictedService _restrictedService; + + @Override + public com.linkedin.datahub.graphql.generated.EntityType type() { + return com.linkedin.datahub.graphql.generated.EntityType.RESTRICTED; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return Restricted.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List restrictedUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + final List entityUrns = + restrictedUrns.stream() + 
.map(_restrictedService::decryptRestrictedUrn) + .collect(Collectors.toList()); + + // Create a map for entityType: entityUrns so we can fetch by entity type below + final Map> entityTypeToUrns = createEntityTypeToUrnsMap(entityUrns); + + try { + // Fetch from the DB for each entity type and add to one result map + final Map entities = new HashMap<>(); + entityTypeToUrns + .keySet() + .forEach( + entityType -> { + try { + entities.putAll( + _entityClient.batchGetV2( + entityType, + new HashSet<>(entityTypeToUrns.get(entityType)), + ASPECTS_TO_FETCH, + context.getAuthentication())); + } catch (Exception e) { + throw new RuntimeException("Failed to fetch restricted entities", e); + } + }); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : entityUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(RestrictedMapper.map(gmsResult, _restrictedService)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Restricted entities", e); + } + } + + private Map> createEntityTypeToUrnsMap(final List urns) { + final Map> entityTypeToUrns = new HashMap<>(); + urns.forEach( + urn -> { + String entityType = urn.getEntityType(); + List existingUrns = + entityTypeToUrns.computeIfAbsent(entityType, k -> new ArrayList<>()); + existingUrns.add(urn); + }); + return entityTypeToUrns; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java index d51e0d06c0fda..cc77ee46d65dc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java @@ -18,7 +18,6 @@ import 
com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; @@ -104,13 +103,12 @@ public SearchResults search( throws Exception { final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), Constants.ROLE_ENTITY_NAME, query, Collections.emptyMap(), start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); return UrnSearchResultsMapper.map(searchResult); } @@ -124,7 +122,7 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete( - Constants.ROLE_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + context.getOperationContext(), Constants.ROLE_ENTITY_NAME, query, filters, limit); return AutoCompleteResultsMapper.map(result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java index c56833cc817eb..a3c9bc380bdcf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java @@ -28,7 +28,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -116,13 +115,12 @@ public SearchResults search( final Map facetFilters = 
ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "tag", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); return UrnSearchResultsMapper.map(searchResult); } @@ -135,7 +133,7 @@ public AutoCompleteResults autoComplete( @Nonnull QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete("tag", query, filters, limit, context.getAuthentication()); + _entityClient.autoComplete(context.getOperationContext(), "tag", query, filters, limit); return AutoCompleteResultsMapper.map(result); } diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index d8dedf20e3c0c..855cf19f0cb3b 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -961,6 +961,11 @@ enum EntityType { """ ENTITY_TYPE + """ + A type of entity that is restricted to the user + """ + RESTRICTED + """ Another entity type - refer to a provided entity type urn. """ @@ -11834,3 +11839,29 @@ type EntityTypeInfo { """ description: String } + +""" +A restricted entity that the user does not have full permissions to view. +This entity type does not relate to an entity type in the database. +""" +type Restricted implements Entity & EntityWithRelationships { + """ + The primary key of the restricted entity + """ + urn: String! + + """ + The standard Entity Type + """ + type: EntityType! 
+ + """ + Edges extending from this entity + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult + + """ + Edges extending from this entity grouped by direction in the lineage graph + """ + lineage(input: LineageInput!): EntityLineageResult +} diff --git a/datahub-graphql-core/src/main/resources/search.graphql b/datahub-graphql-core/src/main/resources/search.graphql index a906362cee185..60b3d73d77fdb 100644 --- a/datahub-graphql-core/src/main/resources/search.graphql +++ b/datahub-graphql-core/src/main/resources/search.graphql @@ -152,6 +152,16 @@ input SearchFlags { Note: This is an experimental feature and is subject to change. """ groupingSpec: GroupingSpec + + """ + Whether to include soft deleted entities + """ + includeSoftDeleted: Boolean + + """ + Whether to include restricted entities + """ + includeRestricted: Boolean } """ diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java index ac07053e59d75..e8c968ab768f8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java @@ -17,6 +17,8 @@ import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.mxe.MetadataChangeProposal; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.List; import org.mockito.Mockito; @@ -48,10 +50,15 @@ public static QueryContext getMockAllowContext(String actorUrn) { Mockito.when(mockAuthorizer.authorize(Mockito.any())).thenReturn(result); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()) - .thenReturn( - new Authentication( - new Actor(ActorType.USER, 
UrnUtils.getUrn(actorUrn).getId()), "creds")); + Authentication authentication = + new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds"); + Mockito.when(mockContext.getAuthentication()).thenReturn(authentication); + OperationContext operationContext = + TestOperationContexts.userContextNoSearchAuthorization( + mock(EntityRegistry.class), mockAuthorizer, authentication); + Mockito.when(mockContext.getOperationContext()).thenReturn(operationContext); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(Mockito.mock(OperationContext.class)); return mockContext; } @@ -69,6 +76,8 @@ public static QueryContext getMockAllowContext(String actorUrn, AuthorizationReq .thenReturn( new Authentication( new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(Mockito.mock(OperationContext.class)); return mockContext; } @@ -90,6 +99,8 @@ public static QueryContext getMockDenyContext(String actorUrn) { .thenReturn( new Authentication( new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(Mockito.mock(OperationContext.class)); return mockContext; } @@ -107,6 +118,8 @@ public static QueryContext getMockDenyContext(String actorUrn, AuthorizationRequ .thenReturn( new Authentication( new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(Mockito.mock(OperationContext.class)); return mockContext; } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java index 419eb71d5e143..ad30e48d8361b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static org.mockito.ArgumentMatchers.any; import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; @@ -11,7 +12,6 @@ import com.linkedin.datahub.graphql.generated.ListAccessTokenResult; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; @@ -43,14 +43,13 @@ public void testGetSuccess() throws Exception { final Authentication testAuth = getAuthentication(mockEnv); Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME), Mockito.eq(""), Mockito.eq(buildFilter(filters, Collections.emptyList())), Mockito.any(SortCriterion.class), Mockito.eq(input.getStart()), - Mockito.eq(input.getCount()), - Mockito.eq(testAuth), - Mockito.any(SearchFlags.class))) + Mockito.eq(input.getCount()))) .thenReturn( new SearchResult() .setFrom(0) diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java index 41797fac636f1..892ba4e1ebb3e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java @@ -21,7 +21,6 @@ import com.linkedin.metadata.browse.BrowseResultGroupV2Array; import com.linkedin.metadata.browse.BrowseResultMetadata; import com.linkedin.metadata.browse.BrowseResultV2; 
-import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; import com.linkedin.metadata.query.filter.Criterion; @@ -257,14 +256,13 @@ private static EntityClient initMockEntityClient( EntityClient client = Mockito.mock(EntityClient.class); Mockito.when( client.browseV2( + Mockito.any(), Mockito.eq(ImmutableList.of(entityName)), Mockito.eq(path), Mockito.eq(filter), Mockito.eq(query), Mockito.eq(start), - Mockito.eq(limit), - Mockito.any(Authentication.class), - Mockito.nullable(SearchFlags.class))) + Mockito.eq(limit))) .thenReturn(result); return client; } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java index 1203f4e22bdc2..c63c9bccab68b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -22,6 +24,7 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchResultMetadata; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.Collections; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -34,7 +37,7 @@ public class ContainerEntitiesResolverTest { @Test public void testGetSuccess() throws Exception { // Create resolver - EntityClient mockClient = 
Mockito.mock(EntityClient.class); + EntityClient mockClient = mock(EntityClient.class); final String childUrn = "urn:li:dataset:(test,test,test)"; final String containerUrn = "urn:li:container:test-container"; @@ -47,6 +50,7 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.searchAcrossEntities( + any(), Mockito.eq(ContainerEntitiesResolver.CONTAINABLE_ENTITY_NAMES), Mockito.eq("*"), Mockito.eq( @@ -59,8 +63,7 @@ public void testGetSuccess() throws Exception { Mockito.eq(0), Mockito.eq(20), Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class))) + Mockito.eq(null))) .thenReturn( new SearchResult() .setFrom(0) @@ -76,9 +79,10 @@ public void testGetSuccess() throws Exception { ContainerEntitiesResolver resolver = new ContainerEntitiesResolver(mockClient); // Execute resolver - QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); + DataFetchingEnvironment mockEnv = mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java index 6184760abfabd..33cd1cb63d621 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.DOMAIN_PROPERTIES_ASPECT_NAME; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -71,14 +72,14 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.filter( + any(), Mockito.eq(Constants.DOMAIN_ENTITY_NAME), Mockito.eq( DomainUtils.buildNameAndParentDomainFilter( TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), Mockito.eq(null), Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class))) + Mockito.any(Integer.class))) .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); resolver.get(mockEnv).get(); @@ -121,12 +122,12 @@ public void testGetSuccessNoParentDomain() throws Exception { Mockito.when( mockClient.filter( + any(), Mockito.eq(Constants.DOMAIN_ENTITY_NAME), Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), null)), Mockito.eq(null), Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class))) + Mockito.any(Integer.class))) .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); resolver.get(mockEnv).get(); @@ -194,14 +195,14 @@ public void testGetNameConflict() throws Exception { Mockito.when( mockClient.filter( + any(), Mockito.eq(Constants.DOMAIN_ENTITY_NAME), Mockito.eq( DomainUtils.buildNameAndParentDomainFilter( TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), Mockito.eq(null), Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class))) + Mockito.any(Integer.class))) .thenReturn( new SearchResult() .setEntities(new SearchEntityArray(new SearchEntity().setEntity(TEST_DOMAIN_URN)))); diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java index 5632654a26ad9..69fb98fbf9e31 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.domain; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -31,12 +32,12 @@ public void testGetSuccess() throws Exception { // Domain has 0 child domains Mockito.when( mockClient.filter( + any(), Mockito.eq("domain"), Mockito.any(), Mockito.any(), Mockito.eq(0), - Mockito.eq(1), - Mockito.any())) + Mockito.eq(1))) .thenReturn(new SearchResult().setNumEntities(0)); assertTrue(resolver.get(mockEnv).get()); @@ -60,12 +61,12 @@ public void testDeleteWithChildDomains() throws Exception { // Domain has child domains Mockito.when( mockClient.filter( + any(), Mockito.eq("domain"), Mockito.any(), Mockito.any(), Mockito.eq(0), - Mockito.eq(1), - Mockito.any())) + Mockito.eq(1))) .thenReturn(new SearchResult().setNumEntities(1)); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java index c6e6cdc7f018e..f970f9e2ea431 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java @@ -1,6 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -24,6 +26,7 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchResultMetadata; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.Collections; import java.util.stream.Collectors; import org.mockito.Mockito; @@ -50,6 +53,7 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.searchAcrossEntities( + any(), Mockito.eq( SEARCHABLE_ENTITY_TYPES.stream() .map(EntityTypeMapper::getName) @@ -65,8 +69,7 @@ public void testGetSuccess() throws Exception { Mockito.eq(0), Mockito.eq(20), Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class))) + Mockito.eq(null))) .thenReturn( new SearchResult() .setFrom(0) @@ -84,6 +87,7 @@ public void testGetSuccess() throws Exception { // Execute resolver QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java index 
ffc3e823d8351..53a16ed5f6cc8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java @@ -2,10 +2,10 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertThrows; -import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -13,7 +13,6 @@ import com.linkedin.datahub.graphql.resolvers.mutate.util.DomainUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.SearchEntity; @@ -43,6 +42,7 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.DOMAIN_ENTITY_NAME), Mockito.eq(""), Mockito.eq(DomainUtils.buildParentDomainFilter(TEST_PARENT_DOMAIN_URN)), @@ -51,9 +51,7 @@ public void testGetSuccess() throws Exception { .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) .setOrder(SortOrder.DESCENDING)), Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))) + Mockito.eq(20))) .thenReturn( new SearchResult() .setFrom(0) @@ -87,6 +85,7 @@ public void testGetSuccessNoParentDomain() throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.DOMAIN_ENTITY_NAME), Mockito.eq(""), Mockito.eq(DomainUtils.buildParentDomainFilter(null)), @@ -95,9 +94,7 @@ public void testGetSuccessNoParentDomain() throws Exception { 
.setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) .setOrder(SortOrder.DESCENDING)), Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))) + Mockito.eq(20))) .thenReturn( new SearchResult() .setFrom(0) @@ -139,13 +136,12 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockClient, Mockito.times(0)) .search( + any(), Mockito.any(), Mockito.eq("*"), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); } @Test @@ -155,13 +151,12 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RemoteInvocationException.class) .when(mockClient) .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); ListDomainsResolver resolver = new ListDomainsResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java index 1aa7f5aef467c..83ebe481708b5 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.assertThrows; import static org.testng.Assert.assertTrue; @@ -53,14 +54,14 @@ private 
MetadataChangeProposal setupTests( Mockito.when( mockClient.filter( + any(), Mockito.eq(Constants.DOMAIN_ENTITY_NAME), Mockito.eq( DomainUtils.buildNameAndParentDomainFilter( name, Urn.createFromString(PARENT_DOMAIN_URN))), Mockito.eq(null), Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class))) + Mockito.any(Integer.class))) .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); DomainProperties properties = new DomainProperties(); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java index 6653b19d6ef2b..72937cb650368 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java @@ -3,6 +3,7 @@ import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.assertThrows; import com.datahub.authentication.Authentication; @@ -127,12 +128,12 @@ public void testGetFailureExistingTermSameName() throws Exception { Mockito.when( mockClient.filter( + any(), Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), Mockito.any(), Mockito.eq(null), Mockito.eq(0), - Mockito.eq(1000), - Mockito.any())) + Mockito.eq(1000))) .thenReturn( new SearchResult() .setEntities( @@ -177,12 +178,12 @@ private EntityClient initMockClient() throws Exception { Mockito.when( mockClient.filter( + any(), Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), Mockito.any(), Mockito.eq(null), Mockito.eq(0), - Mockito.eq(1000), - Mockito.any())) + Mockito.eq(1000))) 
.thenReturn(new SearchResult().setEntities(new SearchEntityArray())); Mockito.when( mockClient.batchGetV2( diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java index b879baf1e65dc..60787fc47c88a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -21,6 +23,7 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -33,6 +36,7 @@ public class GetRootGlossaryNodesResolverTest { public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -41,12 +45,12 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.filter( + any(), Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), Mockito.eq(buildGlossaryEntitiesFilter()), Mockito.eq(null), Mockito.eq(0), - Mockito.eq(100), - Mockito.any(Authentication.class))) + 
Mockito.eq(100))) .thenReturn( new SearchResult() .setEntities( diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java index 201bea752d53f..51760ff9d37f2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -21,6 +23,7 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -33,6 +36,7 @@ public class GetRootGlossaryTermsResolverTest { public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -41,12 +45,12 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.filter( + any(), Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), Mockito.eq(buildGlossaryEntitiesFilter()), Mockito.eq(null), Mockito.eq(0), - Mockito.eq(100), - Mockito.any(Authentication.class))) + Mockito.eq(100))) .thenReturn( new 
SearchResult() .setEntities( diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java index 062c1da5e038d..5b858b810657a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.assertThrows; import static org.testng.Assert.assertTrue; @@ -125,13 +126,13 @@ public void testGetSuccessForDomain() throws Exception { Mockito.when( mockClient.filter( + any(), Mockito.eq(Constants.DOMAIN_ENTITY_NAME), Mockito.eq( DomainUtils.buildNameAndParentDomainFilter(INPUT_FOR_DOMAIN.getName(), null)), Mockito.eq(null), Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class))) + Mockito.any(Integer.class))) .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); DomainProperties properties = new DomainProperties(); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java index a3f4b508dfc3e..86c7b86978127 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.incident; import static 
com.linkedin.datahub.graphql.resolvers.incident.EntityIncidentsResolver.*; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -34,6 +35,7 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.utils.QueryUtils; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.HashMap; import java.util.Map; import org.mockito.Mockito; @@ -86,12 +88,12 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.filter( + Mockito.any(), Mockito.eq(Constants.INCIDENT_ENTITY_NAME), Mockito.eq(expectedFilter), Mockito.eq(expectedSort), Mockito.eq(0), - Mockito.eq(10), - Mockito.any(Authentication.class))) + Mockito.eq(10))) .thenReturn( new SearchResult() .setFrom(0) @@ -120,6 +122,7 @@ public void testGetSuccess() throws Exception { // Execute resolver QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("start"), Mockito.eq(0))).thenReturn(0); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java index e5cb43c4dab61..e0555f5886b8b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.ingest; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import 
com.datahub.authentication.Authentication; @@ -21,6 +22,7 @@ import com.linkedin.ingestion.DataHubIngestionSourceSchedule; import com.linkedin.metadata.Constants; import com.linkedin.secret.DataHubSecretValue; +import io.datahubproject.metadata.context.OperationContext; import org.mockito.Mockito; public class IngestTestUtils { @@ -43,6 +45,7 @@ public static QueryContext getMockAllowContext() { Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); return mockContext; } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java index fdb150e692441..d64a41d59b30e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java @@ -1,6 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -27,6 +29,7 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.HashSet; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -36,17 +39,17 @@ public class 
IngestionSourceExecutionRequestsResolverTest { @Test public void testGetSuccess() throws Exception { // Create resolver - EntityClient mockClient = Mockito.mock(EntityClient.class); + EntityClient mockClient = mock(EntityClient.class); // Mock filter response Mockito.when( mockClient.filter( + any(), Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), Mockito.any(Filter.class), Mockito.any(SortCriterion.class), Mockito.eq(0), - Mockito.eq(10), - Mockito.any(Authentication.class))) + Mockito.eq(10))) .thenReturn( new SearchResult() .setFrom(0) @@ -101,7 +104,8 @@ public void testGetSuccess() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); + DataFetchingEnvironment mockEnv = mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("start"))).thenReturn(0); Mockito.when(mockEnv.getArgument(Mockito.eq("count"))).thenReturn(10); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -121,12 +125,12 @@ public void testGetSuccess() throws Exception { @Test public void testGetUnauthorized() throws Exception { // Create resolver - EntityClient mockClient = Mockito.mock(EntityClient.class); + EntityClient mockClient = mock(EntityClient.class); IngestionSourceExecutionRequestsResolver resolver = new IngestionSourceExecutionRequestsResolver(mockClient); // Execute resolver - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + DataFetchingEnvironment mockEnv = mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("start"))).thenReturn(0); Mockito.when(mockEnv.getArgument(Mockito.eq("count"))).thenReturn(10); @@ -140,18 +144,13 @@ public void testGetUnauthorized() throws Exception { .batchGetV2( Mockito.any(), Mockito.anySet(), 
Mockito.anySet(), Mockito.any(Authentication.class)); Mockito.verify(mockClient, Mockito.times(0)) - .list( - Mockito.any(), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class)); + .list(Mockito.any(), Mockito.any(), Mockito.anyMap(), Mockito.anyInt(), Mockito.anyInt()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver - EntityClient mockClient = Mockito.mock(EntityClient.class); + EntityClient mockClient = mock(EntityClient.class); Mockito.doThrow(RemoteInvocationException.class) .when(mockClient) .batchGetV2( @@ -160,7 +159,7 @@ public void testGetEntityClientException() throws Exception { new IngestionSourceExecutionRequestsResolver(mockClient); // Execute resolver - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + DataFetchingEnvironment mockEnv = mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("start"))).thenReturn(0); Mockito.when(mockEnv.getArgument(Mockito.eq("count"))).thenReturn(10); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java index 7d89f4aafa01a..7a1876466573d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java @@ -1,6 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -14,7 +16,6 @@ import 
com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; @@ -22,6 +23,7 @@ import com.linkedin.r2.RemoteInvocationException; import com.linkedin.secret.DataHubSecretValue; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.HashSet; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -39,14 +41,13 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.SECRETS_ENTITY_NAME), Mockito.eq(""), Mockito.eq(null), Mockito.any(SortCriterion.class), Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))) + Mockito.eq(20))) .thenReturn( new SearchResult() .setFrom(0) @@ -101,6 +102,7 @@ public void testGetUnauthorized() throws Exception { QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockClient, Mockito.times(0)) @@ -108,14 +110,13 @@ public void testGetUnauthorized() throws Exception { Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); Mockito.verify(mockClient, Mockito.times(0)) .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.eq(null), Mockito.any(SortCriterion.class), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); } @Test diff 
--git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java index a86d67fcd15c1..4dfce0e0c2ee8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java @@ -1,6 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -16,12 +18,12 @@ import com.linkedin.ingestion.DataHubIngestionSourceInfo; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DataHubIngestionSourceKey; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.HashSet; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -42,14 +44,13 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), Mockito.eq(""), Mockito.any(), Mockito.any(), Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))) + Mockito.eq(20))) .thenReturn( new SearchResult() .setFrom(0) @@ -85,6 +86,7 @@ public void testGetSuccess() throws Exception { // Execute 
resolver QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -116,13 +118,12 @@ public void testGetUnauthorized() throws Exception { Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); Mockito.verify(mockClient, Mockito.times(0)) .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); } @Test diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java index fd7baf6af7469..d18bc3aa31f89 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ownership; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -11,7 +12,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.OwnershipTypeKey; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; @@ -40,14 +40,13 @@ public 
void testGetSuccess() throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.OWNERSHIP_TYPE_ENTITY_NAME), Mockito.eq(""), Mockito.eq(null), Mockito.any(), Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))) + Mockito.eq(20))) .thenReturn( new SearchResult() .setFrom(0) @@ -90,13 +89,12 @@ public void testGetUnauthorized() throws Exception { Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); Mockito.verify(mockClient, Mockito.times(0)) .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); } @Test diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java index 6c475cdc7f5a8..340e8cbf8514c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java @@ -20,7 +20,6 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; @@ -33,7 +32,6 @@ import graphql.schema.DataFetchingEnvironment; import java.net.URISyntaxException; import java.util.Map; -import org.mockito.Mockito; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; @@ -120,14 +118,7 @@ public void testListPosts() throws Exception { ImmutableList.of(new 
SearchEntity().setEntity(Urn.createFromString(POST_URN_STRING))))); when(_entityClient.search( - eq(POST_ENTITY_NAME), - any(), - eq(null), - any(), - anyInt(), - anyInt(), - eq(_authentication), - Mockito.eq(new SearchFlags().setFulltext(true)))) + any(), eq(POST_ENTITY_NAME), any(), eq(null), any(), anyInt(), anyInt())) .thenReturn(roleSearchResult); when(_entityClient.batchGetV2(eq(POST_ENTITY_NAME), any(), any(), any())) .thenReturn(_entityResponseMap); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java index 8a56b142e5b5e..9ed1d5001b75c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java @@ -1,9 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.query; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.*; -import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -17,7 +17,6 @@ import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; @@ -62,6 +61,7 @@ public void testGetSuccess(final ListQueriesInput input) throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.QUERY_ENTITY_NAME), Mockito.eq( input.getQuery() == null @@ -73,9 +73,7 @@ public void testGetSuccess(final 
ListQueriesInput input) throws Exception { .setField(ListQueriesResolver.CREATED_AT_FIELD) .setOrder(SortOrder.DESCENDING)), Mockito.eq(input.getStart()), - Mockito.eq(input.getCount()), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)))) + Mockito.eq(input.getCount()))) .thenReturn( new SearchResult() .setFrom(0) @@ -116,13 +114,12 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockClient, Mockito.times(0)) .search( + any(), Mockito.any(), Mockito.eq("*"), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); + Mockito.anyInt()); } @Test @@ -132,13 +129,12 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RemoteInvocationException.class) .when(mockClient) .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); + Mockito.anyInt()); ListQueriesResolver resolver = new ListQueriesResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java index 9197d1b18c0c9..e9d5ef00e74dd 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java @@ -9,6 +9,7 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateInviteTokenInput; import 
graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; @@ -42,7 +43,7 @@ public void testPasses() throws Exception { QueryContext mockContext = getMockAllowContext(); when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_inviteTokenService.getInviteToken(any(), eq(true), eq(_authentication))) + when(_inviteTokenService.getInviteToken(any(OperationContext.class), any(), eq(true))) .thenReturn(INVITE_TOKEN_STRING); CreateInviteTokenInput input = new CreateInviteTokenInput(); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java index 8e761454cb06c..78d848e882b6b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java @@ -9,6 +9,7 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.GetInviteTokenInput; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; @@ -19,13 +20,15 @@ public class GetInviteTokenResolverTest { private GetInviteTokenResolver _resolver; private DataFetchingEnvironment _dataFetchingEnvironment; private Authentication _authentication; + private OperationContext opContext; @BeforeMethod public void setupTest() throws Exception { _inviteTokenService = mock(InviteTokenService.class); _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); _authentication = mock(Authentication.class); - + 
opContext = mock(OperationContext.class); + when(opContext.getAuthentication()).thenReturn(_authentication); _resolver = new GetInviteTokenResolver(_inviteTokenService); } @@ -42,7 +45,7 @@ public void testPasses() throws Exception { QueryContext mockContext = getMockAllowContext(); when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_inviteTokenService.getInviteToken(any(), eq(false), eq(_authentication))) + when(_inviteTokenService.getInviteToken(any(OperationContext.class), any(), eq(false))) .thenReturn(INVITE_TOKEN_STRING); GetInviteTokenInput input = new GetInviteTokenInput(); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java index d956295faa180..ab2f852d83040 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java @@ -19,7 +19,6 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; @@ -27,7 +26,6 @@ import com.linkedin.policy.DataHubRoleInfo; import graphql.schema.DataFetchingEnvironment; import java.util.Map; -import org.mockito.Mockito; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; @@ -102,13 +100,7 @@ public void testListRoles() throws Exception { new SearchEntity().setEntity(Urn.createFromString(EDITOR_ROLE_URN_STRING))))); when(_entityClient.search( - eq(DATAHUB_ROLE_ENTITY_NAME), - any(), - any(), 
- anyInt(), - anyInt(), - any(), - Mockito.eq(new SearchFlags().setFulltext(true)))) + any(), eq(DATAHUB_ROLE_ENTITY_NAME), any(), any(), anyInt(), anyInt())) .thenReturn(roleSearchResult); when(_entityClient.batchGetV2(eq(DATAHUB_ROLE_ENTITY_NAME), any(), any(), any())) .thenReturn(_entityResponseMap); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java index 4d56cc3d52af8..58fbadf7e0d7f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import static org.mockito.ArgumentMatchers.any; import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; @@ -318,14 +319,14 @@ public static void testErrorFetchingResults() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when( mockClient.searchAcrossEntities( + any(), Mockito.anyList(), Mockito.anyString(), Mockito.any(), Mockito.anyInt(), Mockito.anyInt(), Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class))) + Mockito.eq(null))) .thenThrow(new RemoteInvocationException()); final AggregateAcrossEntitiesResolver resolver = @@ -392,14 +393,13 @@ private static EntityClient initMockEntityClient( EntityClient client = Mockito.mock(EntityClient.class); Mockito.when( client.searchAcrossEntities( + any(), Mockito.eq(entityTypes), Mockito.eq(query), Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), Mockito.eq(null), - 
Mockito.eq(null), - Mockito.any(Authentication.class), Mockito.eq(facets))) .thenReturn(result); return client; @@ -416,14 +416,13 @@ private static void verifyMockEntityClient( throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .searchAcrossEntities( + any(), Mockito.eq(entityTypes), Mockito.eq(query), Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class), Mockito.eq(facets)); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java index 3b69337acfbd0..ea0765ba9377c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.search; import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static org.mockito.ArgumentMatchers.any; import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; @@ -192,11 +193,11 @@ public static void testAutoCompleteResolverWithViewEntityFilter() throws Excepti // types Mockito.verify(mockClient, Mockito.times(0)) .autoComplete( + any(), Mockito.eq(Constants.DATASET_ENTITY_NAME), Mockito.eq("test"), Mockito.eq(viewInfo.getDefinition().getFilter()), - Mockito.eq(10), - Mockito.any(Authentication.class)); + Mockito.eq(10)); } @Test @@ -225,11 +226,11 @@ private static EntityClient initMockEntityClient( EntityClient client = Mockito.mock(EntityClient.class); Mockito.when( client.autoComplete( + any(), Mockito.eq(entityName), Mockito.eq(query), Mockito.eq(filters), - Mockito.eq(limit), - Mockito.any(Authentication.class))) + 
Mockito.eq(limit))) .thenReturn(result); return client; } @@ -246,11 +247,11 @@ private static void verifyMockEntityClient( throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .autoComplete( + any(), Mockito.eq(entityName), Mockito.eq(query), Mockito.eq(filters), - Mockito.eq(limit), - Mockito.any(Authentication.class)); + Mockito.eq(limit)); } private static DataHubViewInfo createViewInfo(StringArray entityNames) { diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java index f5accdfb02043..1f038427c9aaa 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java @@ -2,8 +2,8 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import static org.mockito.ArgumentMatchers.any; -import com.datahub.authentication.Authentication; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.GetQuickFiltersInput; @@ -108,14 +108,14 @@ public static void testGetQuickFiltersFailure() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when( mockClient.searchAcrossEntities( + any(), Mockito.anyList(), Mockito.anyString(), Mockito.any(), Mockito.anyInt(), Mockito.anyInt(), Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class))) + Mockito.eq(null))) .thenThrow(new RemoteInvocationException()); final GetQuickFiltersResolver resolver = new GetQuickFiltersResolver(mockClient, mockService); @@ -294,14 +294,14 @@ private static EntityClient initMockEntityClient( 
EntityClient client = Mockito.mock(EntityClient.class); Mockito.when( client.searchAcrossEntities( + any(), Mockito.eq(entityTypes), Mockito.eq(query), Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class))) + Mockito.eq(null))) .thenReturn(result); return client; } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java index 0b8c1f1aeb83f..1ef44bbed4cbc 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static org.mockito.ArgumentMatchers.any; import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; @@ -431,14 +432,14 @@ public static void testApplyViewErrorFetchingView() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when( mockClient.searchAcrossEntities( + any(), Mockito.anyList(), Mockito.anyString(), Mockito.any(), Mockito.anyInt(), Mockito.anyInt(), Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class))) + Mockito.eq(null))) .thenThrow(new RemoteInvocationException()); final SearchAcrossEntitiesResolver resolver = @@ -480,14 +481,13 @@ private static EntityClient initMockEntityClient( EntityClient client = Mockito.mock(EntityClient.class); Mockito.when( client.searchAcrossEntities( + any(), Mockito.eq(entityTypes), Mockito.eq(query), Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - 
Mockito.any(Authentication.class))) + Mockito.eq(null))) .thenReturn(result); return client; } @@ -502,14 +502,13 @@ private static void verifyMockEntityClient( throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .searchAcrossEntities( + any(), Mockito.eq(entityTypes), Mockito.eq(query), Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class)); + Mockito.eq(null)); } private static void verifyMockViewService(ViewService mockService, Urn viewUrn) { diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java index a50591b7fc399..b5b7e78aec484 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java @@ -16,7 +16,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.AggregationMetadataArray; import com.linkedin.metadata.search.LineageSearchEntity; import com.linkedin.metadata.search.LineageSearchEntityArray; @@ -106,6 +105,7 @@ public void testSearchAcrossLineage() throws Exception { lineageSearchResult.setEntities(new LineageSearchEntityArray(lineageSearchEntity)); when(_entityClient.searchAcrossLineage( + any(), eq(UrnUtils.getUrn(SOURCE_URN_STRING)), eq(com.linkedin.metadata.graph.LineageDirection.DOWNSTREAM), anyList(), @@ -116,9 +116,7 @@ public void testSearchAcrossLineage() throws Exception { eq(START), eq(COUNT), eq(START_TIMESTAMP_MILLIS), - eq(END_TIMESTAMP_MILLIS), - eq(new 
SearchFlags().setFulltext(true).setSkipHighlighting(true)), - eq(_authentication))) + eq(END_TIMESTAMP_MILLIS))) .thenReturn(lineageSearchResult); final SearchAcrossLineageResults results = _resolver.get(_dataFetchingEnvironment).join(); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java index 9716799628a45..a5310a052f613 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java @@ -2,8 +2,8 @@ import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; -import com.datahub.authentication.Authentication; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; @@ -65,7 +65,9 @@ public void testDefaultSearchFlags() throws Exception { .setSkipAggregates(false) .setSkipHighlighting(true) // empty/wildcard .setMaxAggValues(20) - .setSkipCache(false), + .setSkipCache(false) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), true)); } @@ -136,7 +138,9 @@ public void testNonWildCardSearchFlags() throws Exception { .setSkipAggregates(false) .setSkipHighlighting(false) // empty/wildcard .setMaxAggValues(20) - .setSkipCache(false), + .setSkipCache(false) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), true)); } @@ -144,14 +148,13 @@ private EntityClient initMockSearchEntityClient() throws Exception { EntityClient client = Mockito.mock(EntityClient.class); Mockito.when( client.search( + any(), Mockito.anyString(), Mockito.anyString(), Mockito.any(), Mockito.any(), Mockito.anyInt(), - Mockito.anyInt(), - 
Mockito.any(Authentication.class), - Mockito.any())) + Mockito.anyInt())) .thenReturn( new SearchResult() .setEntities(new SearchEntityArray()) @@ -174,14 +177,13 @@ private void verifyMockSearchEntityClient( throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .search( + any(), Mockito.eq(entityName), Mockito.eq(query), Mockito.eq(filter), Mockito.eq(sortCriterion), Mockito.eq(start), - Mockito.eq(limit), - Mockito.any(Authentication.class), - Mockito.eq(searchFlags)); + Mockito.eq(limit)); } private SearchResolverTest() {} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java index 6075425d09c05..5e3cd539cade7 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java @@ -1,16 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.test; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.*; -import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ListTestsInput; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; @@ -34,13 +33,12 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.TEST_ENTITY_NAME), Mockito.eq(""), Mockito.eq(Collections.emptyMap()), 
Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))) + Mockito.eq(20))) .thenReturn( new SearchResult() .setFrom(0) @@ -80,14 +78,7 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockClient, Mockito.times(0)) - .search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + .search(any(), any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), Mockito.anyInt()); } @Test @@ -96,14 +87,7 @@ public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.doThrow(RemoteInvocationException.class) .when(mockClient) - .search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + .search(any(), any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), Mockito.anyInt()); ListTestsResolver resolver = new ListTestsResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java index 8c30c17201bc6..a3b9e25e99225 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java @@ -1,9 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.view; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.*; -import 
com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -15,7 +15,6 @@ import com.linkedin.datahub.graphql.generated.ListViewsResult; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -44,6 +43,7 @@ public void testGetSuccessInput() throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), Mockito.eq(""), Mockito.eq( @@ -67,9 +67,7 @@ public void testGetSuccessInput() throws Exception { .setNegated(false)))))))), Mockito.any(), Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))) + Mockito.eq(20))) .thenReturn( new SearchResult() .setFrom(0) @@ -112,13 +110,12 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockClient, Mockito.times(0)) .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); } @Test @@ -128,13 +125,12 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RemoteInvocationException.class) .when(mockClient) .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); ListMyViewsResolver resolver = new ListMyViewsResolver(mockClient); // Execute resolver diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java index 85e20cd656fcd..99b0e76976748 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java @@ -1,9 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.view; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.*; -import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -14,7 +14,6 @@ import com.linkedin.datahub.graphql.generated.ListMyViewsInput; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -46,6 +45,7 @@ public void testGetSuccessInput1() throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), Mockito.eq(""), Mockito.eq( @@ -78,9 +78,7 @@ public void testGetSuccessInput1() throws Exception { .setNegated(false)))))))), Mockito.any(), Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))) + Mockito.eq(20))) .thenReturn( new SearchResult() .setFrom(0) @@ -113,6 +111,7 @@ public void testGetSuccessInput2() throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), Mockito.eq(""), Mockito.eq( @@ -135,9 +134,7 @@ public void 
testGetSuccessInput2() throws Exception { .setNegated(false)))))))), Mockito.any(), Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))) + Mockito.eq(20))) .thenReturn( new SearchResult() .setFrom(0) @@ -178,13 +175,12 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockClient, Mockito.times(0)) .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); } @Test @@ -194,13 +190,12 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RemoteInvocationException.class) .when(mockClient) .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); ListMyViewsResolver resolver = new ListMyViewsResolver(mockClient); // Execute resolver diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java index eee27096e2238..126df9187bc23 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java @@ -7,12 +7,13 @@ import com.linkedin.datahub.upgrade.restorebackup.RestoreBackup; import com.linkedin.datahub.upgrade.restoreindices.RestoreIndices; import com.linkedin.datahub.upgrade.system.SystemUpdate; -import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; -import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices; +import com.linkedin.datahub.upgrade.system.SystemUpdateBlocking; +import com.linkedin.datahub.upgrade.system.SystemUpdateNonBlocking; 
import java.util.List; import javax.inject.Inject; import javax.inject.Named; import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.CommandLineRunner; import org.springframework.stereotype.Component; import picocli.CommandLine; @@ -51,18 +52,18 @@ private static final class Args { @Named("removeUnknownAspects") private RemoveUnknownAspects removeUnknownAspects; - @Inject - @Named("buildIndices") - private BuildIndices buildIndices; - - @Inject - @Named("cleanIndices") - private CleanIndices cleanIndices; - - @Inject + @Autowired(required = false) @Named("systemUpdate") private SystemUpdate systemUpdate; + @Autowired(required = false) + @Named("systemUpdateBlocking") + private SystemUpdateBlocking systemUpdateBlocking; + + @Autowired(required = false) + @Named("systemUpdateNonBlocking") + private SystemUpdateNonBlocking systemUpdateNonBlocking; + @Override public void run(String... cmdLineArgs) { _upgradeManager.register(noCodeUpgrade); @@ -70,9 +71,15 @@ public void run(String... 
cmdLineArgs) { _upgradeManager.register(restoreIndices); _upgradeManager.register(restoreBackup); _upgradeManager.register(removeUnknownAspects); - _upgradeManager.register(buildIndices); - _upgradeManager.register(cleanIndices); - _upgradeManager.register(systemUpdate); + if (systemUpdate != null) { + _upgradeManager.register(systemUpdate); + } + if (systemUpdateBlocking != null) { + _upgradeManager.register(systemUpdateBlocking); + } + if (systemUpdateNonBlocking != null) { + _upgradeManager.register(systemUpdateNonBlocking); + } final Args args = new Args(); new CommandLine(args).setCaseInsensitiveEnumValuesAllowed(true).parseArgs(cmdLineArgs); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java index 2b2f4648f76e7..a33722d7761cc 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java @@ -1,23 +1,28 @@ package com.linkedin.datahub.upgrade.config; -import com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; +import com.linkedin.datahub.upgrade.system.browsepaths.BackfillBrowsePathsV2; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.search.SearchService; +import io.datahubproject.metadata.context.OperationContext; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; @Configuration +@Conditional(SystemUpdateCondition.NonBlockingSystemUpdateCondition.class) public class BackfillBrowsePathsV2Config { @Bean - public BackfillBrowsePathsV2 
backfillBrowsePathsV2( + public NonBlockingSystemUpgrade backfillBrowsePathsV2( + final OperationContext opContext, EntityService entityService, SearchService searchService, @Value("${systemUpdate.browsePathsV2.enabled}") final boolean enabled, @Value("${systemUpdate.browsePathsV2.reprocess.enabled}") final boolean reprocessEnabled, @Value("${systemUpdate.browsePathsV2.batchSize}") final Integer batchSize) { return new BackfillBrowsePathsV2( - entityService, searchService, enabled, reprocessEnabled, batchSize); + opContext, entityService, searchService, enabled, reprocessEnabled, batchSize); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillOwnershipTypesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillOwnershipTypesConfig.java new file mode 100644 index 0000000000000..3ca397a8ce268 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillOwnershipTypesConfig.java @@ -0,0 +1,28 @@ +package com.linkedin.datahub.upgrade.config; + +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; +import com.linkedin.datahub.upgrade.system.ownershiptypes.OwnershipTypes; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.search.SearchService; +import io.datahubproject.metadata.context.OperationContext; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; +import org.springframework.context.annotation.Configuration; + +@Configuration +@Conditional(SystemUpdateCondition.NonBlockingSystemUpdateCondition.class) +public class BackfillOwnershipTypesConfig { + + @Bean + public NonBlockingSystemUpgrade backfillOwnershipTypes( + final OperationContext opContext, + final EntityService entityService, + final SearchService searchService, + @Value("${systemUpdate.ownershipTypes.enabled}") final boolean enabled, + 
@Value("${systemUpdate.ownershipTypes.reprocess.enabled}") final boolean reprocessEnabled, + @Value("${systemUpdate.ownershipTypes.batchSize}") final Integer batchSize) { + return new OwnershipTypes( + opContext, entityService, searchService, enabled, reprocessEnabled, batchSize); + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillPolicyFieldsConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillPolicyFieldsConfig.java index 6da85a5c16979..7226ec267dbbc 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillPolicyFieldsConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillPolicyFieldsConfig.java @@ -1,23 +1,27 @@ package com.linkedin.datahub.upgrade.config; -import com.linkedin.datahub.upgrade.system.entity.steps.BackfillPolicyFields; +import com.linkedin.datahub.upgrade.system.policyfields.BackfillPolicyFields; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.search.SearchService; +import io.datahubproject.metadata.context.OperationContext; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; @Configuration +@Conditional(SystemUpdateCondition.NonBlockingSystemUpdateCondition.class) public class BackfillPolicyFieldsConfig { @Bean public BackfillPolicyFields backfillPolicyFields( + final OperationContext opContext, EntityService entityService, SearchService searchService, @Value("${systemUpdate.policyFields.enabled}") final boolean enabled, @Value("${systemUpdate.policyFields.reprocess.enabled}") final boolean reprocessEnabled, @Value("${systemUpdate.policyFields.batchSize}") final Integer batchSize) { return new BackfillPolicyFields( - entityService, searchService, enabled, reprocessEnabled, batchSize); + 
opContext, entityService, searchService, enabled, reprocessEnabled, batchSize); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java index caa45988733df..3510fa513b3b9 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.upgrade.config; +import com.linkedin.datahub.upgrade.system.BlockingSystemUpgrade; import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; @@ -10,12 +11,14 @@ import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; @Configuration +@Conditional(SystemUpdateCondition.BlockingSystemUpdateCondition.class) public class BuildIndicesConfig { @Bean(name = "buildIndices") - public BuildIndices buildIndices( + public BlockingSystemUpgrade buildIndices( final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, final EntitySearchService entitySearchService, diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java index 5bd7244a92e45..4f54b01459625 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java @@ -1,5 
+1,6 @@ package com.linkedin.datahub.upgrade.config; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; @@ -8,12 +9,14 @@ import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; @Configuration +@Conditional(SystemUpdateCondition.NonBlockingSystemUpdateCondition.class) public class CleanIndicesConfig { - @Bean(name = "cleanIndices") - public CleanIndices cleanIndices( + @Bean + public NonBlockingSystemUpgrade cleanIndices( final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, final EntitySearchService entitySearchService, diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/ReindexDataJobViaNodesCLLConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/ReindexDataJobViaNodesCLLConfig.java index 83dad80944f5f..0281ff4f4169b 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/ReindexDataJobViaNodesCLLConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/ReindexDataJobViaNodesCLLConfig.java @@ -1,16 +1,19 @@ package com.linkedin.datahub.upgrade.config; -import com.linkedin.datahub.upgrade.system.via.ReindexDataJobViaNodesCLL; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; +import com.linkedin.datahub.upgrade.system.vianodes.ReindexDataJobViaNodesCLL; import com.linkedin.metadata.entity.EntityService; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; +import 
org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; @Configuration +@Conditional(SystemUpdateCondition.NonBlockingSystemUpdateCondition.class) public class ReindexDataJobViaNodesCLLConfig { @Bean - public ReindexDataJobViaNodesCLL _reindexDataJobViaNodesCLL( + public NonBlockingSystemUpgrade reindexDataJobViaNodesCLL( EntityService entityService, @Value("${systemUpdate.dataJobNodeCLL.enabled}") final boolean enabled, @Value("${systemUpdate.dataJobNodeCLL.batchSize}") final Integer batchSize) { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateCondition.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateCondition.java index ea432dfa9f7df..0d65af742a592 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateCondition.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateCondition.java @@ -1,14 +1,48 @@ package com.linkedin.datahub.upgrade.config; +import java.util.Objects; +import java.util.Set; import org.springframework.boot.ApplicationArguments; import org.springframework.context.annotation.Condition; import org.springframework.context.annotation.ConditionContext; import org.springframework.core.type.AnnotatedTypeMetadata; public class SystemUpdateCondition implements Condition { + public static final String SYSTEM_UPDATE_ARG = "SystemUpdate"; + public static final String BLOCKING_SYSTEM_UPDATE_ARG = SYSTEM_UPDATE_ARG + "Blocking"; + public static final String NONBLOCKING_SYSTEM_UPDATE_ARG = SYSTEM_UPDATE_ARG + "NonBlocking"; + public static final Set SYSTEM_UPDATE_ARGS = + Set.of(SYSTEM_UPDATE_ARG, BLOCKING_SYSTEM_UPDATE_ARG, NONBLOCKING_SYSTEM_UPDATE_ARG); + @Override public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { return context.getBeanFactory().getBean(ApplicationArguments.class).getNonOptionArgs().stream() - 
.anyMatch("SystemUpdate"::equals); + .filter(Objects::nonNull) + .anyMatch(SYSTEM_UPDATE_ARGS::contains); + } + + public static class BlockingSystemUpdateCondition implements Condition { + @Override + public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { + return context + .getBeanFactory() + .getBean(ApplicationArguments.class) + .getNonOptionArgs() + .stream() + .anyMatch(arg -> SYSTEM_UPDATE_ARG.equals(arg) || BLOCKING_SYSTEM_UPDATE_ARG.equals(arg)); + } + } + + public static class NonBlockingSystemUpdateCondition implements Condition { + @Override + public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { + return context + .getBeanFactory() + .getBean(ApplicationArguments.class) + .getNonOptionArgs() + .stream() + .anyMatch( + arg -> SYSTEM_UPDATE_ARG.equals(arg) || NONBLOCKING_SYSTEM_UPDATE_ARG.equals(arg)); + } } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java index 17ad56ec80bac..bea38b616f86f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java @@ -1,11 +1,11 @@ package com.linkedin.datahub.upgrade.config; +import com.linkedin.datahub.upgrade.system.BlockingSystemUpgrade; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; import com.linkedin.datahub.upgrade.system.SystemUpdate; -import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; -import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices; -import com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2; -import com.linkedin.datahub.upgrade.system.entity.steps.BackfillPolicyFields; -import com.linkedin.datahub.upgrade.system.via.ReindexDataJobViaNodesCLL; +import 
com.linkedin.datahub.upgrade.system.SystemUpdateBlocking; +import com.linkedin.datahub.upgrade.system.SystemUpdateNonBlocking; +import com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep; import com.linkedin.gms.factory.common.TopicConventionFactory; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; @@ -14,8 +14,12 @@ import com.linkedin.metadata.config.kafka.KafkaConfiguration; import com.linkedin.metadata.dao.producer.KafkaEventProducer; import com.linkedin.metadata.dao.producer.KafkaHealthChecker; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.version.GitVersion; import com.linkedin.mxe.TopicConvention; +import java.util.List; +import javax.annotation.PostConstruct; import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.IndexedRecord; import org.apache.kafka.clients.producer.KafkaProducer; @@ -32,27 +36,28 @@ @Slf4j @Configuration +@Conditional(SystemUpdateCondition.class) public class SystemUpdateConfig { + @Bean(name = "systemUpdate") public SystemUpdate systemUpdate( - final BuildIndices buildIndices, - final CleanIndices cleanIndices, - @Qualifier("duheKafkaEventProducer") final KafkaEventProducer kafkaEventProducer, - final GitVersion gitVersion, - @Qualifier("revision") String revision, - final BackfillBrowsePathsV2 backfillBrowsePathsV2, - final ReindexDataJobViaNodesCLL reindexDataJobViaNodesCLL, - final BackfillPolicyFields backfillPolicyFields) { + final List blockingSystemUpgrades, + final List nonBlockingSystemUpgrades, + final DataHubStartupStep dataHubStartupStep) { + return new SystemUpdate(blockingSystemUpgrades, nonBlockingSystemUpgrades, dataHubStartupStep); + } - String version = String.format("%s-%s", gitVersion.getVersion(), revision); - return new SystemUpdate( - buildIndices, - cleanIndices, - kafkaEventProducer, - version, - 
backfillBrowsePathsV2, - reindexDataJobViaNodesCLL, - backfillPolicyFields); + @Bean(name = "systemUpdateBlocking") + public SystemUpdateBlocking systemUpdateBlocking( + final List blockingSystemUpgrades, + final DataHubStartupStep dataHubStartupStep) { + return new SystemUpdateBlocking(blockingSystemUpgrades, List.of(), dataHubStartupStep); + } + + @Bean(name = "systemUpdateNonBlocking") + public SystemUpdateNonBlocking systemUpdateNonBlocking( + final List nonBlockingSystemUpgrades) { + return new SystemUpdateNonBlocking(List.of(), nonBlockingSystemUpgrades, null); } @Value("#{systemEnvironment['DATAHUB_REVISION'] ?: '0'}") @@ -63,6 +68,15 @@ public String getRevision() { return revision; } + @Bean + public DataHubStartupStep dataHubStartupStep( + @Qualifier("duheKafkaEventProducer") final KafkaEventProducer kafkaEventProducer, + final GitVersion gitVersion, + @Qualifier("revision") String revision) { + return new DataHubStartupStep( + kafkaEventProducer, String.format("%s-%s", gitVersion.getVersion(), revision)); + } + @Autowired @Qualifier(TopicConventionFactory.TOPIC_CONVENTION_BEAN) private TopicConvention topicConvention; @@ -92,7 +106,6 @@ protected KafkaEventProducer duheKafkaEventProducer( */ @Primary @Bean(name = "kafkaEventProducer") - @Conditional(SystemUpdateCondition.class) @ConditionalOnProperty( name = "kafka.schemaRegistry.type", havingValue = InternalSchemaRegistryFactory.TYPE) @@ -100,4 +113,15 @@ protected KafkaEventProducer kafkaEventProducer( @Qualifier("duheKafkaEventProducer") KafkaEventProducer kafkaEventProducer) { return kafkaEventProducer; } + + @Configuration + public static class SystemUpdateSetup { + @Autowired private EntityService entityService; + @Autowired private EntitySearchService entitySearchService; + + @PostConstruct + protected void postConstruct() { + entitySearchService.postConstruct(entityService); + } + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/BlockingSystemUpgrade.java 
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/BlockingSystemUpgrade.java new file mode 100644 index 0000000000000..4fae5b2239d11 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/BlockingSystemUpgrade.java @@ -0,0 +1,5 @@ +package com.linkedin.datahub.upgrade.system; + +import com.linkedin.datahub.upgrade.Upgrade; + +public interface BlockingSystemUpgrade extends Upgrade {} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/NonBlockingSystemUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/NonBlockingSystemUpgrade.java new file mode 100644 index 0000000000000..fd83f1544a098 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/NonBlockingSystemUpgrade.java @@ -0,0 +1,5 @@ +package com.linkedin.datahub.upgrade.system; + +import com.linkedin.datahub.upgrade.Upgrade; + +public interface NonBlockingSystemUpgrade extends Upgrade {} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java index f02c820066c1c..ad1c6c98fa3fd 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java @@ -3,58 +3,48 @@ import com.linkedin.datahub.upgrade.Upgrade; import com.linkedin.datahub.upgrade.UpgradeCleanupStep; import com.linkedin.datahub.upgrade.UpgradeStep; -import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; -import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep; -import com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2; -import com.linkedin.datahub.upgrade.system.entity.steps.BackfillPolicyFields; -import 
com.linkedin.datahub.upgrade.system.via.ReindexDataJobViaNodesCLL; -import com.linkedin.metadata.dao.producer.KafkaEventProducer; +import java.util.LinkedList; import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.Stream; +import javax.annotation.Nullable; +import lombok.Getter; +import lombok.NonNull; +import lombok.experimental.Accessors; import lombok.extern.slf4j.Slf4j; +@Getter @Slf4j +@Accessors(fluent = true) public class SystemUpdate implements Upgrade { - private final List _preStartupUpgrades; - private final List _postStartupUpgrades; - private final List _steps; + private final List steps; + private final List cleanupSteps; public SystemUpdate( - final BuildIndices buildIndicesJob, - final CleanIndices cleanIndicesJob, - final KafkaEventProducer kafkaEventProducer, - final String version, - final BackfillBrowsePathsV2 backfillBrowsePathsV2, - final ReindexDataJobViaNodesCLL upgradeViaNodeCll, - final BackfillPolicyFields backfillPolicyFields) { - - _preStartupUpgrades = List.of(buildIndicesJob); - _steps = List.of(new DataHubStartupStep(kafkaEventProducer, version)); - _postStartupUpgrades = - List.of(cleanIndicesJob, backfillBrowsePathsV2, upgradeViaNodeCll, backfillPolicyFields); - } + @NonNull final List blockingSystemUpgrades, + @NonNull final List nonBlockingSystemUpgrades, + @Nullable final DataHubStartupStep dataHubStartupStep) { - @Override - public String id() { - return "SystemUpdate"; - } + steps = new LinkedList<>(); + cleanupSteps = new LinkedList<>(); - @Override - public List steps() { - return Stream.concat( - Stream.concat( - _preStartupUpgrades.stream().flatMap(up -> up.steps().stream()), _steps.stream()), - _postStartupUpgrades.stream().flatMap(up -> up.steps().stream())) - .collect(Collectors.toList()); + // blocking upgrades + steps.addAll(blockingSystemUpgrades.stream().flatMap(up -> up.steps().stream()).toList()); + cleanupSteps.addAll( + blockingSystemUpgrades.stream().flatMap(up -> 
up.cleanupSteps().stream()).toList()); + + // emit system update message if blocking upgrade(s) present + if (dataHubStartupStep != null && !blockingSystemUpgrades.isEmpty()) { + steps.add(dataHubStartupStep); + } + + // add non-blocking upgrades last + steps.addAll(nonBlockingSystemUpgrades.stream().flatMap(up -> up.steps().stream()).toList()); + cleanupSteps.addAll( + nonBlockingSystemUpgrades.stream().flatMap(up -> up.cleanupSteps().stream()).toList()); } @Override - public List cleanupSteps() { - return Stream.concat( - _preStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream()), - _postStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream())) - .collect(Collectors.toList()); + public String id() { + return getClass().getSimpleName(); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdateBlocking.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdateBlocking.java new file mode 100644 index 0000000000000..32841149c467b --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdateBlocking.java @@ -0,0 +1,16 @@ +package com.linkedin.datahub.upgrade.system; + +import com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep; +import java.util.List; +import lombok.NonNull; +import org.jetbrains.annotations.Nullable; + +public class SystemUpdateBlocking extends SystemUpdate { + + public SystemUpdateBlocking( + @NonNull List blockingSystemUpgrades, + @NonNull List nonBlockingSystemUpgrades, + @Nullable DataHubStartupStep dataHubStartupStep) { + super(blockingSystemUpgrades, nonBlockingSystemUpgrades, dataHubStartupStep); + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdateNonBlocking.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdateNonBlocking.java new file mode 100644 index 0000000000000..3309babc1f6cf --- /dev/null +++ 
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdateNonBlocking.java @@ -0,0 +1,16 @@ +package com.linkedin.datahub.upgrade.system; + +import com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep; +import java.util.List; +import lombok.NonNull; +import org.jetbrains.annotations.Nullable; + +public class SystemUpdateNonBlocking extends SystemUpdate { + + public SystemUpdateNonBlocking( + @NonNull List blockingSystemUpgrades, + @NonNull List nonBlockingSystemUpgrades, + @Nullable DataHubStartupStep dataHubStartupStep) { + super(blockingSystemUpgrades, nonBlockingSystemUpgrades, dataHubStartupStep); + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/browsepaths/BackfillBrowsePathsV2.java similarity index 66% rename from datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java rename to datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/browsepaths/BackfillBrowsePathsV2.java index 9b023e1e239a2..16c039e2a64ab 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/browsepaths/BackfillBrowsePathsV2.java @@ -1,17 +1,19 @@ -package com.linkedin.datahub.upgrade.system.entity.steps; +package com.linkedin.datahub.upgrade.system.browsepaths; import com.google.common.collect.ImmutableList; -import com.linkedin.datahub.upgrade.Upgrade; import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.search.SearchService; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; -public class BackfillBrowsePathsV2 implements Upgrade { 
+public class BackfillBrowsePathsV2 implements NonBlockingSystemUpgrade { private final List _steps; public BackfillBrowsePathsV2( + OperationContext opContext, EntityService entityService, SearchService searchService, boolean enabled, @@ -21,7 +23,7 @@ public BackfillBrowsePathsV2( _steps = ImmutableList.of( new BackfillBrowsePathsV2Step( - entityService, searchService, reprocessEnabled, batchSize)); + opContext, entityService, searchService, reprocessEnabled, batchSize)); } else { _steps = ImmutableList.of(); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/browsepaths/BackfillBrowsePathsV2Step.java similarity index 92% rename from datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java rename to datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/browsepaths/BackfillBrowsePathsV2Step.java index 2d64e0052ae82..30674ecc3d00e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/browsepaths/BackfillBrowsePathsV2Step.java @@ -1,4 +1,4 @@ -package com.linkedin.datahub.upgrade.system.entity.steps; +package com.linkedin.datahub.upgrade.system.browsepaths; import static com.linkedin.metadata.Constants.*; @@ -18,7 +18,6 @@ import com.linkedin.metadata.aspect.utils.DefaultAspectsUtil; import com.linkedin.metadata.boot.BootstrapStep; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -31,6 +30,7 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import 
com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; +import io.datahubproject.metadata.context.OperationContext; import java.util.Set; import java.util.function.Function; import lombok.extern.slf4j.Slf4j; @@ -54,6 +54,7 @@ public class BackfillBrowsePathsV2Step implements UpgradeStep { Constants.ML_FEATURE_TABLE_ENTITY_NAME, Constants.ML_FEATURE_ENTITY_NAME); + private final OperationContext opContext; private final EntityService entityService; private final SearchService searchService; @@ -61,10 +62,12 @@ public class BackfillBrowsePathsV2Step implements UpgradeStep { private final Integer batchSize; public BackfillBrowsePathsV2Step( + OperationContext opContext, EntityService entityService, SearchService searchService, boolean reprocessEnabled, Integer batchSize) { + this.opContext = opContext; this.searchService = searchService; this.entityService = entityService; this.reprocessEnabled = reprocessEnabled; @@ -110,18 +113,20 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S final ScrollResult scrollResult = searchService.scrollAcrossEntities( + opContext.withSearchFlags( + flags -> + flags + .setFulltext(true) + .setSkipCache(true) + .setSkipHighlighting(true) + .setSkipAggregates(true)), ImmutableList.of(entityType), "*", filter, null, scrollId, null, - batchSize, - new SearchFlags() - .setFulltext(true) - .setSkipCache(true) - .setSkipHighlighting(true) - .setSkipAggregates(true)); + batchSize); if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().size() == 0) { return null; @@ -234,7 +239,8 @@ public boolean skip(UpgradeContext context) { return false; } - boolean previouslyRun = entityService.exists(UPGRADE_ID_URN, true); + boolean previouslyRun = + entityService.exists(UPGRADE_ID_URN, DATA_HUB_UPGRADE_RESULT_ASPECT_NAME, true); if (previouslyRun) { log.info("{} was already run. 
Skipping.", id()); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java index 970d67be337a6..fea0479876a2e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java @@ -1,7 +1,7 @@ package com.linkedin.datahub.upgrade.system.elasticsearch; -import com.linkedin.datahub.upgrade.Upgrade; import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.system.BlockingSystemUpgrade; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesPostStep; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesPreStep; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesStep; @@ -19,7 +19,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -public class BuildIndices implements Upgrade { +public class BuildIndices implements BlockingSystemUpgrade { private final List _steps; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java index ad68386622b21..e316481e2b07e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java @@ -1,7 +1,7 @@ package com.linkedin.datahub.upgrade.system.elasticsearch; -import com.linkedin.datahub.upgrade.Upgrade; import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.CleanIndicesStep; import 
com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; @@ -16,7 +16,7 @@ import lombok.extern.slf4j.Slf4j; @Slf4j -public class CleanIndices implements Upgrade { +public class CleanIndices implements NonBlockingSystemUpgrade { private final List _steps; public CleanIndices( diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/ownershiptypes/OwnershipTypes.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/ownershiptypes/OwnershipTypes.java new file mode 100644 index 0000000000000..63aacde7ef8ab --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/ownershiptypes/OwnershipTypes.java @@ -0,0 +1,41 @@ +package com.linkedin.datahub.upgrade.system.ownershiptypes; + +import com.google.common.collect.ImmutableList; +import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.search.SearchService; +import io.datahubproject.metadata.context.OperationContext; +import java.util.List; + +public class OwnershipTypes implements NonBlockingSystemUpgrade { + + private final List _steps; + + public OwnershipTypes( + OperationContext opContext, + EntityService entityService, + SearchService searchService, + boolean enabled, + boolean reprocessEnabled, + Integer batchSize) { + if (enabled) { + _steps = + ImmutableList.of( + new OwnershipTypesStep( + opContext, entityService, searchService, enabled, reprocessEnabled, batchSize)); + } else { + _steps = ImmutableList.of(); + } + } + + @Override + public String id() { + return getClass().getSimpleName(); + } + + @Override + public List steps() { + return _steps; + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/ownershiptypes/OwnershipTypesStep.java 
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/ownershiptypes/OwnershipTypesStep.java new file mode 100644 index 0000000000000..4c55f4ddcb31d --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/ownershiptypes/OwnershipTypesStep.java @@ -0,0 +1,276 @@ +package com.linkedin.datahub.upgrade.system.ownershiptypes; + +import static com.linkedin.metadata.Constants.DATA_HUB_UPGRADE_RESULT_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.upgrade.UpgradeContext; +import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; +import com.linkedin.entity.Aspect; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.aspect.batch.AspectsBatch; +import com.linkedin.metadata.boot.BootstrapStep; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.search.ScrollResult; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchEntityArray; +import com.linkedin.metadata.search.SearchService; +import com.linkedin.metadata.utils.AuditStampUtils; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import 
com.linkedin.mxe.SystemMetadata; +import io.datahubproject.metadata.context.OperationContext; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class OwnershipTypesStep implements UpgradeStep { + + private static final String UPGRADE_ID = OwnershipTypes.class.getSimpleName(); + private static final Urn UPGRADE_ID_URN = BootstrapStep.getUpgradeUrn(UPGRADE_ID); + + private static final Set ENTITY_TYPES_TO_MIGRATE = + ImmutableSet.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME, + Constants.ML_PRIMARY_KEY_ENTITY_NAME, + Constants.GLOSSARY_TERM_ENTITY_NAME, + Constants.GLOSSARY_NODE_ENTITY_NAME, + Constants.TAG_ENTITY_NAME, + Constants.ROLE_ENTITY_NAME, + Constants.CORP_GROUP_ENTITY_NAME, + Constants.CORP_USER_ENTITY_NAME, + Constants.CONTAINER_ENTITY_NAME, + Constants.DOMAIN_ENTITY_NAME, + Constants.DATA_PRODUCT_ENTITY_NAME, + Constants.NOTEBOOK_ENTITY_NAME); + + private final OperationContext opContext; + private final EntityService entityService; + private final SearchService searchService; + private final boolean enabled; + private final boolean reprocessEnabled; + private final Integer batchSize; + + public OwnershipTypesStep( + OperationContext opContext, + EntityService entityService, + SearchService searchService, + boolean enabled, + boolean reprocessEnabled, + Integer batchSize) { + this.opContext = opContext; + this.entityService = entityService; + this.searchService = searchService; + this.enabled = enabled; + this.reprocessEnabled = reprocessEnabled; + this.batchSize = batchSize; + } + + @Override + public Function executable() { + return 
(context) -> { + final AuditStamp auditStamp = AuditStampUtils.createDefaultAuditStamp(); + + String scrollId = null; + for (String entityType : ENTITY_TYPES_TO_MIGRATE) { + int migratedCount = 0; + do { + log.info( + String.format( + "Upgrading batch %s-%s of browse paths for entity type %s", + migratedCount, migratedCount + batchSize, entityType)); + scrollId = ownershipTypes(entityType, auditStamp, scrollId); + migratedCount += batchSize; + } while (scrollId != null); + } + + BootstrapStep.setUpgradeResult(UPGRADE_ID_URN, entityService); + + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); + }; + } + + private String ownershipTypes(String entityType, AuditStamp auditStamp, String scrollId) { + + final Filter filter; + + if (reprocessEnabled) { + filter = backfillDefaultOwnershipTypesFilter(); + } else { + filter = backfillOwnershipTypesFilter(); + } + + final ScrollResult scrollResult = + searchService.scrollAcrossEntities( + opContext.withSearchFlags( + flags -> + flags + .setFulltext(true) + .setSkipCache(true) + .setSkipHighlighting(true) + .setSkipAggregates(true)), + ImmutableList.of(entityType), + "*", + filter, + null, + scrollId, + null, + batchSize); + + if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().size() == 0) { + return null; + } + + try { + ingestOwnershipTypes(scrollResult.getEntities(), auditStamp); + } catch (Exception e) { + // don't stop the whole step because of one bad urn or one bad ingestion + log.error( + String.format( + "Error ingesting ownership aspect for urn %s", + scrollResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + e); + } + + return scrollResult.getScrollId(); + } + + private Filter backfillOwnershipTypesFilter() { + // Condition: has `owners` AND does NOT have `ownershipTypes` + Criterion hasOwners = new Criterion(); + hasOwners.setCondition(Condition.EXISTS); + hasOwners.setField("owners"); + // Excludes entities with 
ownershipTypes + Criterion missingOwnershipTypes = new Criterion(); + missingOwnershipTypes.setCondition(Condition.IS_NULL); + missingOwnershipTypes.setField("ownershipTypes"); + + CriterionArray criterionArray = new CriterionArray(); + criterionArray.add(hasOwners); + criterionArray.add(missingOwnershipTypes); + + ConjunctiveCriterion conjunctiveCriterion = new ConjunctiveCriterion(); + conjunctiveCriterion.setAnd(criterionArray); + + ConjunctiveCriterionArray conjunctiveCriterionArray = new ConjunctiveCriterionArray(); + conjunctiveCriterionArray.add(conjunctiveCriterion); + + Filter filter = new Filter(); + filter.setOr(conjunctiveCriterionArray); + return filter; + } + + private Filter backfillDefaultOwnershipTypesFilter() { + // Condition: has `owners` + Criterion hasOwners = new Criterion(); + hasOwners.setCondition(Condition.EXISTS); + hasOwners.setField("owners"); + + CriterionArray criterionArray = new CriterionArray(); + criterionArray.add(hasOwners); + + ConjunctiveCriterion conjunctiveCriterion = new ConjunctiveCriterion(); + conjunctiveCriterion.setAnd(criterionArray); + + ConjunctiveCriterionArray conjunctiveCriterionArray = new ConjunctiveCriterionArray(); + conjunctiveCriterionArray.add(conjunctiveCriterion); + + Filter filter = new Filter(); + filter.setOr(conjunctiveCriterionArray); + return filter; + } + + private void ingestOwnershipTypes(SearchEntityArray searchBatch, AuditStamp auditStamp) + throws Exception { + Map> existing = + entityService.getLatestAspectObjects( + searchBatch.stream().map(SearchEntity::getEntity).collect(Collectors.toSet()), + Set.of(Constants.OWNERSHIP_ASPECT_NAME)); + + List mcps = + existing.entrySet().stream() + .filter(result -> result.getValue().containsKey(Constants.OWNERSHIP_ASPECT_NAME)) + .map( + result -> { + MetadataChangeProposal proposal = new MetadataChangeProposal(); + proposal.setEntityUrn(result.getKey()); + proposal.setEntityType(result.getKey().getEntityType()); + 
proposal.setAspectName(Constants.OWNERSHIP_ASPECT_NAME); + proposal.setChangeType(ChangeType.UPSERT); + proposal.setSystemMetadata( + new SystemMetadata() + .setRunId(DEFAULT_RUN_ID) + .setLastObserved(System.currentTimeMillis())); + proposal.setAspect( + GenericRecordUtils.serializeAspect( + result.getValue().get(Constants.OWNERSHIP_ASPECT_NAME))); + return proposal; + }) + .collect(Collectors.toList()); + + log.debug(String.format("Reingesting ownership for %s urns", mcps.size())); + AspectsBatch batch = AspectsBatchImpl.builder().mcps(mcps, auditStamp, entityService).build(); + + entityService.ingestProposal(batch, false); + } + + @Override + public String id() { + return UPGRADE_ID; + } + + /** + * Returns whether the upgrade should proceed if the step fails after exceeding the maximum + * retries. + */ + @Override + public boolean isOptional() { + return true; + } + + @Override + /** + * Returns whether the upgrade should be skipped. Uses previous run history or the environment + * variables to determine whether to skip. + */ + public boolean skip(UpgradeContext context) { + if (reprocessEnabled && enabled) { + return false; + } + + boolean previouslyRun = + entityService.exists(UPGRADE_ID_URN, DATA_HUB_UPGRADE_RESULT_ASPECT_NAME, true); + + if (previouslyRun) { + log.info("{} was already run. 
Skipping.", id()); + } + return (previouslyRun || !enabled); + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillPolicyFields.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/policyfields/BackfillPolicyFields.java similarity index 66% rename from datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillPolicyFields.java rename to datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/policyfields/BackfillPolicyFields.java index 3e1d385b87e45..ca568e9192895 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillPolicyFields.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/policyfields/BackfillPolicyFields.java @@ -1,16 +1,18 @@ -package com.linkedin.datahub.upgrade.system.entity.steps; +package com.linkedin.datahub.upgrade.system.policyfields; import com.google.common.collect.ImmutableList; -import com.linkedin.datahub.upgrade.Upgrade; import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.search.SearchService; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; -public class BackfillPolicyFields implements Upgrade { +public class BackfillPolicyFields implements NonBlockingSystemUpgrade { private final List _steps; public BackfillPolicyFields( + OperationContext opContext, EntityService entityService, SearchService searchService, boolean enabled, @@ -20,7 +22,7 @@ public BackfillPolicyFields( _steps = ImmutableList.of( new BackfillPolicyFieldsStep( - entityService, searchService, reprocessEnabled, batchSize)); + opContext, entityService, searchService, reprocessEnabled, batchSize)); } else { _steps = ImmutableList.of(); } diff --git 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillPolicyFieldsStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/policyfields/BackfillPolicyFieldsStep.java similarity index 92% rename from datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillPolicyFieldsStep.java rename to datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/policyfields/BackfillPolicyFieldsStep.java index 27d48aa5e0555..a9b8060f02c10 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillPolicyFieldsStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/policyfields/BackfillPolicyFieldsStep.java @@ -1,4 +1,4 @@ -package com.linkedin.datahub.upgrade.system.entity.steps; +package com.linkedin.datahub.upgrade.system.policyfields; import static com.linkedin.metadata.Constants.*; @@ -16,7 +16,6 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.boot.BootstrapStep; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -30,6 +29,7 @@ import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; import com.linkedin.policy.DataHubPolicyInfo; +import io.datahubproject.metadata.context.OperationContext; import java.net.URISyntaxException; import java.util.Collections; import java.util.function.Function; @@ -44,16 +44,20 @@ public class BackfillPolicyFieldsStep implements UpgradeStep { private static final String UPGRADE_ID = "BackfillPolicyFieldsStep"; private static final Urn UPGRADE_ID_URN = BootstrapStep.getUpgradeUrn(UPGRADE_ID); + + private final OperationContext opContext; private final boolean reprocessEnabled; private final Integer batchSize; 
private final EntityService entityService; private final SearchService _searchService; public BackfillPolicyFieldsStep( + OperationContext opContext, EntityService entityService, SearchService searchService, boolean reprocessEnabled, Integer batchSize) { + this.opContext = opContext; this.entityService = entityService; this._searchService = searchService; this.reprocessEnabled = reprocessEnabled; @@ -108,7 +112,8 @@ public boolean skip(UpgradeContext context) { return false; } - boolean previouslyRun = entityService.exists(UPGRADE_ID_URN, true); + boolean previouslyRun = + entityService.exists(UPGRADE_ID_URN, DATA_HUB_UPGRADE_RESULT_ASPECT_NAME, true); if (previouslyRun) { log.info("{} was already run. Skipping.", id()); } @@ -120,18 +125,20 @@ private String backfillPolicies(AuditStamp auditStamp, String scrollId) { final Filter filter = backfillPolicyFieldFilter(); final ScrollResult scrollResult = _searchService.scrollAcrossEntities( + opContext.withSearchFlags( + flags -> + flags + .setFulltext(true) + .setSkipCache(true) + .setSkipHighlighting(true) + .setSkipAggregates(true)), ImmutableList.of(Constants.POLICY_ENTITY_NAME), "*", filter, null, scrollId, null, - batchSize, - new SearchFlags() - .setFulltext(true) - .setSkipCache(true) - .setSkipHighlighting(true) - .setSkipAggregates(true)); + batchSize); if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().isEmpty()) { return null; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLL.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/vianodes/ReindexDataJobViaNodesCLL.java similarity index 80% rename from datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLL.java rename to datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/vianodes/ReindexDataJobViaNodesCLL.java index 59975693322d1..c997aa15df989 100644 --- 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLL.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/vianodes/ReindexDataJobViaNodesCLL.java @@ -1,10 +1,8 @@ -package com.linkedin.datahub.upgrade.system.via; - -import static com.linkedin.metadata.Constants.*; +package com.linkedin.datahub.upgrade.system.vianodes; import com.google.common.collect.ImmutableList; -import com.linkedin.datahub.upgrade.Upgrade; import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; import com.linkedin.metadata.entity.EntityService; import java.util.List; import lombok.extern.slf4j.Slf4j; @@ -14,7 +12,7 @@ * required to index column-level lineage correctly using via nodes. */ @Slf4j -public class ReindexDataJobViaNodesCLL implements Upgrade { +public class ReindexDataJobViaNodesCLL implements NonBlockingSystemUpgrade { private final List _steps; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLLStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/vianodes/ReindexDataJobViaNodesCLLStep.java similarity index 94% rename from datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLLStep.java rename to datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/vianodes/ReindexDataJobViaNodesCLLStep.java index 56166caf5b57e..a6a0331072a11 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLLStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/vianodes/ReindexDataJobViaNodesCLLStep.java @@ -1,4 +1,4 @@ -package com.linkedin.datahub.upgrade.system.via; +package com.linkedin.datahub.upgrade.system.vianodes; import static com.linkedin.metadata.Constants.*; @@ -68,7 +68,8 @@ public boolean isOptional() { * variable SKIP_REINDEX_DATA_JOB_INPUT_OUTPUT to 
determine whether to skip. */ public boolean skip(UpgradeContext context) { - boolean previouslyRun = entityService.exists(UPGRADE_ID_URN, true); + boolean previouslyRun = + entityService.exists(UPGRADE_ID_URN, DATA_HUB_UPGRADE_RESULT_ASPECT_NAME, true); boolean envFlagRecommendsSkip = Boolean.parseBoolean(System.getenv("SKIP_REINDEX_DATA_JOB_INPUT_OUTPUT")); if (previouslyRun) { diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java index 3e655be900bf2..dc4c3073ee351 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java @@ -3,7 +3,7 @@ import static org.testng.AssertJUnit.*; import com.linkedin.datahub.upgrade.restoreindices.RestoreIndices; -import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; +import com.linkedin.datahub.upgrade.system.BlockingSystemUpgrade; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import javax.inject.Named; import org.springframework.beans.factory.annotation.Autowired; @@ -14,6 +14,7 @@ @ActiveProfiles("test") @SpringBootTest( + args = {"-u", "SystemUpdate"}, classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}) public class UpgradeCliApplicationTest extends AbstractTestNGSpringContextTests { @@ -23,7 +24,7 @@ public class UpgradeCliApplicationTest extends AbstractTestNGSpringContextTests @Autowired @Named("buildIndices") - private BuildIndices buildIndices; + private BlockingSystemUpgrade buildIndices; @Autowired private ESIndexBuilder esIndexBuilder; diff --git a/datahub-web-react/src/app/buildEntityRegistry.ts b/datahub-web-react/src/app/buildEntityRegistry.ts index 4f74681570802..fdd8e0afa77d1 100644 --- a/datahub-web-react/src/app/buildEntityRegistry.ts +++ 
b/datahub-web-react/src/app/buildEntityRegistry.ts @@ -20,6 +20,7 @@ import { DataPlatformEntity } from './entity/dataPlatform/DataPlatformEntity'; import { DataProductEntity } from './entity/dataProduct/DataProductEntity'; import { DataPlatformInstanceEntity } from './entity/dataPlatformInstance/DataPlatformInstanceEntity'; import { RoleEntity } from './entity/Access/RoleEntity'; +import { RestrictedEntity } from './entity/restricted/RestrictedEntity'; export default function buildEntityRegistry() { const registry = new EntityRegistry(); @@ -44,5 +45,6 @@ export default function buildEntityRegistry() { registry.register(new DataPlatformEntity()); registry.register(new DataProductEntity()); registry.register(new DataPlatformInstanceEntity()); + registry.register(new RestrictedEntity()); return registry; } \ No newline at end of file diff --git a/datahub-web-react/src/app/entity/restricted/RestrictedEntity.tsx b/datahub-web-react/src/app/entity/restricted/RestrictedEntity.tsx new file mode 100644 index 0000000000000..482709c110d6b --- /dev/null +++ b/datahub-web-react/src/app/entity/restricted/RestrictedEntity.tsx @@ -0,0 +1,90 @@ +import React from 'react'; +import { QuestionOutlined } from '@ant-design/icons'; +import { EntityType, Restricted, SearchResult } from '../../../types.generated'; +import { Entity, IconStyleType, PreviewType } from '../Entity'; +import { getDataForEntityType } from '../shared/containers/profile/utils'; +import RestrictedIcon from '../../../images/restricted.svg'; +import { RestrictedEntityProfile } from './RestrictedEntityProfile'; + +/** + * Definition of the DataHub Restricted entity. 
+ */ +export class RestrictedEntity implements Entity { + type: EntityType = EntityType.Restricted; + + icon = (fontSize: number, styleType: IconStyleType, color?: string) => { + if (styleType === IconStyleType.TAB_VIEW) { + return ; + } + + if (styleType === IconStyleType.HIGHLIGHT) { + return ; + } + + return ( + + ); + }; + + isSearchEnabled = () => false; + + isBrowseEnabled = () => false; + + isLineageEnabled = () => true; + + getAutoCompleteFieldName = () => 'name'; + + getPathName = () => 'restricted'; + + getEntityName = () => 'Restricted'; + + getCollectionName = () => 'Restricted Assets'; + + renderProfile = (_: string) => ; + + renderPreview = (_: PreviewType, _data: Restricted) => { + return ; + }; + + renderSearch = (_result: SearchResult) => { + return ; + }; + + getLineageVizConfig = (entity: Restricted) => { + return { + urn: entity?.urn, + name: 'Restricted Asset', + type: EntityType.Restricted, + icon: RestrictedIcon, + }; + }; + + displayName = (_data: Restricted) => { + return 'Restricted Asset'; + }; + + getOverridePropertiesFromEntity = (_data: Restricted) => { + return {}; + }; + + getGenericEntityProperties = (data: Restricted) => { + return getDataForEntityType({ + data, + entityType: this.type, + getOverrideProperties: this.getOverridePropertiesFromEntity, + }); + }; + + supportedCapabilities = () => { + return new Set([]); + }; + + getGraphName = () => { + return 'restricted'; + }; +} diff --git a/datahub-web-react/src/app/entity/restricted/RestrictedEntityProfile.tsx b/datahub-web-react/src/app/entity/restricted/RestrictedEntityProfile.tsx new file mode 100644 index 0000000000000..cf8aa5935e42f --- /dev/null +++ b/datahub-web-react/src/app/entity/restricted/RestrictedEntityProfile.tsx @@ -0,0 +1,30 @@ +import React from 'react'; +import styled from 'styled-components'; +import { + LogoIcon, + PlatformContentWrapper, + PlatformText, + PreviewImage, +} from '../shared/containers/profile/header/PlatformContent/PlatformContentView'; +import 
RestrictedIcon from '../../../images/restricted.svg'; +import { EntityTitle } from '../shared/containers/profile/header/EntityName'; + +const SubHeader = styled.div` + margin-top: 8px; + font-size: 14px; +`; + +export function RestrictedEntityProfile() { + return ( + <> + + + + + Restricted + + Restricted Asset + This asset is Restricted. Please request access to see more. + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityName.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityName.tsx index 762bd5f9111a0..702f780f1aa11 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityName.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityName.tsx @@ -7,7 +7,7 @@ import { useEntityRegistry } from '../../../../../useEntityRegistry'; import { useEntityData, useRefetch } from '../../../EntityContext'; import { useGlossaryEntityData } from '../../../GlossaryEntityContext'; -const EntityTitle = styled(Typography.Title)` +export const EntityTitle = styled(Typography.Title)` margin-right: 10px; &&& { diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/PlatformContent/PlatformContentView.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/PlatformContent/PlatformContentView.tsx index 1090dac501d0b..52cb656f54eb9 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/header/PlatformContent/PlatformContentView.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/PlatformContent/PlatformContentView.tsx @@ -13,20 +13,20 @@ import { } from './ParentNodesView'; import ParentEntities from '../../../../../../search/filters/ParentEntities'; -const LogoIcon = styled.span` +export const LogoIcon = styled.span` display: flex; gap: 4px; margin-right: 8px; `; -const PreviewImage = styled(Image)` +export const PreviewImage = styled(Image)` max-height: 17px; width: auto; object-fit: 
contain; background-color: transparent; `; -const PlatformContentWrapper = styled.div` +export const PlatformContentWrapper = styled.div` display: flex; align-items: center; margin: 0 8px 6px 0; @@ -34,7 +34,7 @@ const PlatformContentWrapper = styled.div` flex: 1; `; -const PlatformText = styled(Typography.Text)` +export const PlatformText = styled(Typography.Text)` font-size: 12px; line-height: 20px; font-weight: 700; diff --git a/datahub-web-react/src/app/lineage/LineageEntityNode.tsx b/datahub-web-react/src/app/lineage/LineageEntityNode.tsx index d9f7d8c2c69de..fcf2198e7e004 100644 --- a/datahub-web-react/src/app/lineage/LineageEntityNode.tsx +++ b/datahub-web-react/src/app/lineage/LineageEntityNode.tsx @@ -18,6 +18,7 @@ import { convertInputFieldsToSchemaFields } from './utils/columnLineageUtils'; import ManageLineageMenu from './manage/ManageLineageMenu'; import { useGetLineageTimeParams } from './utils/useGetLineageTimeParams'; import { EntityHealth } from '../entity/shared/containers/profile/header/EntityHealth'; +import { EntityType } from '../../types.generated'; const CLICK_DELAY_THRESHOLD = 1000; const DRAG_DISTANCE_THRESHOLD = 20; @@ -71,6 +72,7 @@ export default function LineageEntityNode({ const [getAsyncEntityLineage, { data: asyncLineageData, loading }] = useGetEntityLineageLazyQuery(); const isHideSiblingMode = useIsSeparateSiblingsMode(); const areColumnsCollapsed = !!collapsedColumnsNodes[node?.data?.urn || 'noop']; + const isRestricted = node.data.type === EntityType.Restricted; function fetchEntityLineage() { if (node.data.urn) { @@ -92,6 +94,12 @@ export default function LineageEntityNode({ } } + const centerEntity = () => { + if (!isRestricted) { + onEntityCenter({ urn: node.data.urn, type: node.data.type }); + } + }; + useEffect(() => { if (asyncLineageData && asyncLineageData.entity && !hasExpanded && !loading) { const entityAndType = { @@ -231,7 +239,7 @@ export default function LineageEntityNode({ ))} onEntityCenter({ urn: node.data.urn, 
type: node.data.type })} + onDoubleClick={centerEntity} onClick={(event) => { if ( event.timeStamp < lastMouseDownCoordinates.ts + CLICK_DELAY_THRESHOLD && @@ -311,25 +319,27 @@ export default function LineageEntityNode({ {entityRegistry.getIcon(node.data.type, 16, IconStyleType.SVG)} )} - e.stopPropagation()} - > - onEntityCenter({ urn: node.data.urn, type: node.data.type })} - entityType={node.data.type} - entityPlatform={node.data.platform?.name} - canEditLineage={node.data.canEditLineage} - /> - + {!isRestricted && ( + e.stopPropagation()} + > + onEntityCenter({ urn: node.data.urn, type: node.data.type })} + entityType={node.data.type} + entityPlatform={node.data.platform?.name} + canEditLineage={node.data.canEditLineage} + /> + + )} - {getShortenedTitle(platformDisplayText || '', width)} - - {' '} - |{' '} - + {platformDisplayText && ( + <> + {getShortenedTitle(platformDisplayText || '', width)} + + {' '} + |{' '} + + + )} {entityName} diff --git a/datahub-web-react/src/app/lineage/LineageExplorer.tsx b/datahub-web-react/src/app/lineage/LineageExplorer.tsx index a03f62f93abeb..26ffaa26a6ca2 100644 --- a/datahub-web-react/src/app/lineage/LineageExplorer.tsx +++ b/datahub-web-react/src/app/lineage/LineageExplorer.tsx @@ -220,9 +220,11 @@ export default function LineageExplorer({ urn, type }: Props) { - + {selectedEntity.type !== EntityType.Restricted && ( + + )} ) } diff --git a/datahub-web-react/src/graphql/lineage.graphql b/datahub-web-react/src/graphql/lineage.graphql index 8d84168d4c67e..b73a99488cf8a 100644 --- a/datahub-web-react/src/graphql/lineage.graphql +++ b/datahub-web-react/src/graphql/lineage.graphql @@ -269,6 +269,10 @@ fragment lineageNodeProperties on EntityWithRelationships { ... on MLPrimaryKey { ...nonRecursiveMLPrimaryKey } + ... 
on Restricted { + urn + type + } } fragment lineageFields on EntityWithRelationships { diff --git a/datahub-web-react/src/images/restricted.svg b/datahub-web-react/src/images/restricted.svg new file mode 100644 index 0000000000000..7537f7fb83fb4 --- /dev/null +++ b/datahub-web-react/src/images/restricted.svg @@ -0,0 +1,2 @@ + + \ No newline at end of file diff --git a/docker/profiles/docker-compose.prerequisites.yml b/docker/profiles/docker-compose.prerequisites.yml index 142c983182054..d58b7493d99a0 100644 --- a/docker/profiles/docker-compose.prerequisites.yml +++ b/docker/profiles/docker-compose.prerequisites.yml @@ -290,7 +290,7 @@ services: memory: 1G healthcheck: test: curl -sS --fail http://search:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s - start_period: 20s + start_period: 30s interval: 1s retries: 3 timeout: 5s @@ -325,7 +325,7 @@ services: memory: 1G healthcheck: test: curl -sS --fail http://search:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s - start_period: 20s + start_period: 30s interval: 1s retries: 3 timeout: 5s diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/ChangeMCP.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/ChangeMCP.java index 94e8bbab3ceeb..19896e2b03544 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/ChangeMCP.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/ChangeMCP.java @@ -1,6 +1,7 @@ package com.linkedin.metadata.aspect.batch; import com.linkedin.data.DataMap; +import com.linkedin.data.template.RecordTemplate; import com.linkedin.metadata.aspect.SystemAspect; import java.lang.reflect.InvocationTargetException; import javax.annotation.Nonnull; @@ -23,6 +24,14 @@ public interface ChangeMCP extends MCPItem { void setNextAspectVersion(long nextAspectVersion); + @Nullable + default RecordTemplate getPreviousRecordTemplate() { + if (getPreviousSystemAspect() 
!= null) { + return getPreviousSystemAspect().getRecordTemplate(); + } + return null; + } + default T getPreviousAspect(Class clazz) { if (getPreviousSystemAspect() != null) { try { @@ -35,8 +44,7 @@ default T getPreviousAspect(Class clazz) { | NoSuchMethodException e) { throw new RuntimeException(e); } - } else { - return null; } + return null; } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/hooks/OwnerTypeMap.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/hooks/OwnerTypeMap.java new file mode 100644 index 0000000000000..45e9280199330 --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/hooks/OwnerTypeMap.java @@ -0,0 +1,111 @@ +package com.linkedin.metadata.aspect.hooks; + +import static com.linkedin.metadata.Constants.DEFAULT_OWNERSHIP_TYPE_URN; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + +import com.linkedin.common.Owner; +import com.linkedin.common.Ownership; +import com.linkedin.common.UrnArray; +import com.linkedin.common.UrnArrayMap; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.metadata.aspect.AspectRetriever; +import com.linkedin.metadata.aspect.batch.ChangeMCP; +import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; +import com.linkedin.metadata.aspect.plugins.hooks.MutationHook; +import com.linkedin.util.Pair; +import java.util.Collection; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +/** Hook to populate the ownerType map within the ownership aspect */ +public class OwnerTypeMap extends MutationHook { + public OwnerTypeMap(AspectPluginConfig aspectPluginConfig) { + super(aspectPluginConfig); + } + + @Override + protected Stream> writeMutation( + 
@Nonnull Collection changeMCPS, @Nonnull AspectRetriever aspectRetriever) { + + List> results = new LinkedList<>(); + + for (ChangeMCP item : changeMCPS) { + if (OWNERSHIP_ASPECT_NAME.equals(item.getAspectName()) && item.getRecordTemplate() != null) { + final Map> oldTypeOwner = + groupByOwnerType(item.getPreviousRecordTemplate()); + final Map> newTypeOwner = groupByOwnerType(item.getRecordTemplate()); + + Set removedTypes = + oldTypeOwner.keySet().stream() + .filter(typeUrn -> !newTypeOwner.containsKey(typeUrn)) + .collect(Collectors.toSet()); + + Set updatedTypes = newTypeOwner.keySet(); + + Map typeOwners = + Stream.concat(removedTypes.stream(), updatedTypes.stream()) + .map( + typeUrn -> { + final String typeFieldName = encodeFieldName(typeUrn.toString()); + if (removedTypes.contains(typeUrn)) { + // removed + return Pair.of(typeFieldName, new UrnArray()); + } + // updated + return Pair.of( + typeFieldName, + new UrnArray( + newTypeOwner.getOrDefault(typeUrn, Collections.emptySet()).stream() + .map(Owner::getOwner) + .collect(Collectors.toSet()))); + }) + .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond)); + + if (!typeOwners.isEmpty()) { + item.getAspect(Ownership.class).setOwnerTypes(new UrnArrayMap(typeOwners)); + results.add(Pair.of(item, true)); + continue; + } + } + + // no op + results.add(Pair.of(item, false)); + } + + return results.stream(); + } + + private static Map> groupByOwnerType( + @Nullable RecordTemplate ownershipRecordTemplate) { + if (ownershipRecordTemplate != null) { + Ownership ownership = new Ownership(ownershipRecordTemplate.data()); + if (!ownership.getOwners().isEmpty()) { + return ownership.getOwners().stream() + .collect( + Collectors.groupingBy( + owner -> + owner.getTypeUrn() != null + ? 
owner.getTypeUrn() + : DEFAULT_OWNERSHIP_TYPE_URN, + Collectors.toSet())); + } + } + return Collections.emptyMap(); + } + + public static String encodeFieldName(String value) { + return value.replaceAll("[.]", "%2E"); + } + + public static String decodeFieldName(String value) { + return value.replaceAll("%2E", "."); + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpecUtils.java b/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpecUtils.java index 53a689602f27c..271f62128c70d 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpecUtils.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpecUtils.java @@ -22,10 +22,15 @@ public static String getSchemaFieldName(PathSpec pathSpec) { return lastComponent; } - public static Map getResolvedProperties(final DataSchema schema) { - return !schema.getResolvedProperties().isEmpty() - ? schema.getResolvedProperties() - : schema.getProperties(); + public static Map getResolvedProperties( + final DataSchema schema, Map fallback) { + if (!schema.getResolvedProperties().isEmpty()) { + return schema.getResolvedProperties(); + } else if (!schema.getProperties().isEmpty()) { + return schema.getProperties(); + } else { + return fallback; + } } public static Optional getPathSpecWithAspectName(TraverserContext context) { diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpecExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpecExtractor.java index ad32b315f6b1a..855fba2ad46fb 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpecExtractor.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpecExtractor.java @@ -9,6 +9,7 @@ import com.linkedin.data.schema.annotation.TraverserContext; import com.linkedin.metadata.models.annotation.RelationshipAnnotation; import java.util.ArrayList; 
+import java.util.Collections; import java.util.List; import java.util.Map; @@ -46,7 +47,7 @@ public void callbackOnContext(TraverserContext context, DataSchemaTraverse.Order // Next, check resolved properties for annotations on primitives. final Map resolvedProperties = - FieldSpecUtils.getResolvedProperties(currentSchema); + FieldSpecUtils.getResolvedProperties(currentSchema, Collections.emptyMap()); final Object resolvedAnnotationObj = resolvedProperties.get(RelationshipAnnotation.ANNOTATION_NAME); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpecExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpecExtractor.java index a20fd36f0d70c..0c5e1a4c31598 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpecExtractor.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpecExtractor.java @@ -109,10 +109,12 @@ private Object getAnnotationObj(TraverserContext context) { .equals("com.linkedin.common.urn.Urn"); final Map resolvedProperties = - FieldSpecUtils.getResolvedProperties(currentSchema); + FieldSpecUtils.getResolvedProperties(currentSchema, properties); // if primary doesn't have an annotation, then ignore secondary urns - if (isUrn && primaryAnnotationObj != null) { + if (isUrn + && primaryAnnotationObj != null + && resolvedProperties.containsKey(SearchableAnnotation.ANNOTATION_NAME)) { DataMap annotationMap = (DataMap) resolvedProperties.get(SearchableAnnotation.ANNOTATION_NAME); Map result = new HashMap<>(annotationMap); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java index 4808b7ee3b5ac..4ec8702efde70 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java +++ 
b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java @@ -20,6 +20,8 @@ public class SearchableAnnotation { public static final String FIELD_NAME_ALIASES = "fieldNameAliases"; public static final String ANNOTATION_NAME = "Searchable"; + public static final Set OBJECT_FIELD_TYPES = + ImmutableSet.of(FieldType.OBJECT, FieldType.MAP_ARRAY); private static final Set DEFAULT_QUERY_FIELD_TYPES = ImmutableSet.of( FieldType.TEXT, @@ -71,7 +73,8 @@ public enum FieldType { OBJECT, BROWSE_PATH_V2, WORD_GRAM, - DOUBLE + DOUBLE, + MAP_ARRAY } @Nonnull diff --git a/entity-registry/src/test/java/com/linkedin/metadata/aspect/hooks/OwnerTypeMapTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/hooks/OwnerTypeMapTest.java new file mode 100644 index 0000000000000..895744bb182eb --- /dev/null +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/hooks/OwnerTypeMapTest.java @@ -0,0 +1,220 @@ +package com.linkedin.metadata.aspect.hooks; + +import static com.linkedin.metadata.Constants.DEFAULT_OWNERSHIP_TYPE_URN; +import static org.mockito.Mockito.mock; +import static org.testng.Assert.assertEquals; + +import com.linkedin.common.Owner; +import com.linkedin.common.OwnerArray; +import com.linkedin.common.Ownership; +import com.linkedin.common.UrnArray; +import com.linkedin.common.UrnArrayMap; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.aspect.AspectRetriever; +import com.linkedin.metadata.aspect.batch.ChangeMCP; +import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.test.metadata.aspect.TestEntityRegistry; +import com.linkedin.test.metadata.aspect.batch.TestMCP; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.annotation.Nullable; +import 
org.testng.annotations.Test; + +public class OwnerTypeMapTest { + private static final AspectRetriever ASPECT_RETRIEVER = mock(AspectRetriever.class); + private static final EntityRegistry ENTITY_REGISTRY = new TestEntityRegistry(); + private static final AspectPluginConfig ASPECT_PLUGIN_CONFIG = + AspectPluginConfig.builder() + .className("some class") + .enabled(true) + .supportedEntityAspectNames( + List.of( + AspectPluginConfig.EntityAspectName.builder() + .entityName("*") + .aspectName("ownership") + .build())) + .build(); + private static final Urn TEST_ENTITY_URN = + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,calm-pagoda-323403.jaffle_shop.orders,PROD)"); + private static final Urn TEST_USER_A = UrnUtils.getUrn("urn:li:corpUser:a"); + private static final Urn TEST_USER_B = UrnUtils.getUrn("urn:li:corpUser:b"); + private static final Urn TEST_GROUP_A = UrnUtils.getUrn("urn:li:corpGroup:a"); + private static final Urn TEST_GROUP_B = UrnUtils.getUrn("urn:li:corpGroup:b"); + private static final Urn TECH_OWNER = + UrnUtils.getUrn("urn:li:ownershipType:__system__technical_owner"); + private static final Urn BUS_OWNER = + UrnUtils.getUrn("urn:li:ownershipType:__system__business_owner"); + + @Test + public void ownershipTypeMutationNoneType() { + OwnerTypeMap testHook = new OwnerTypeMap(ASPECT_PLUGIN_CONFIG); + Ownership ownership = buildOwnership(Map.of(TEST_USER_A, List.of(), TEST_GROUP_A, List.of())); + testHook.writeMutation(buildMCP(null, ownership), ASPECT_RETRIEVER); + + assertEquals( + ownership.getOwnerTypes(), + new UrnArrayMap( + Map.of( + DEFAULT_OWNERSHIP_TYPE_URN.toString(), + new UrnArray(List.of(TEST_USER_A, TEST_GROUP_A)))), + "Expected generic owners to be grouped by `none` ownership type."); + } + + @Test + public void ownershipTypeMutationNoneTypeAdd() { + OwnerTypeMap testHook = new OwnerTypeMap(ASPECT_PLUGIN_CONFIG); + Ownership oldOwnership = buildOwnership(Map.of(TEST_USER_A, List.of())); + Ownership newOwnership = + 
buildOwnership(Map.of(TEST_USER_A, List.of(), TEST_GROUP_A, List.of())); + testHook.writeMutation(buildMCP(oldOwnership, newOwnership), ASPECT_RETRIEVER); + + assertEquals( + newOwnership.getOwnerTypes(), + new UrnArrayMap( + Map.of( + DEFAULT_OWNERSHIP_TYPE_URN.toString(), + new UrnArray(List.of(TEST_USER_A, TEST_GROUP_A)))), + "Expected generic owners to be grouped by `none` ownership type."); + } + + @Test + public void ownershipTypeMutationNoneTypeRemove() { + OwnerTypeMap testHook = new OwnerTypeMap(ASPECT_PLUGIN_CONFIG); + Ownership oldOwnership = + buildOwnership(Map.of(TEST_USER_A, List.of(), TEST_GROUP_A, List.of())); + Ownership newOwnership = buildOwnership(Map.of(TEST_USER_A, List.of())); + testHook.writeMutation(buildMCP(oldOwnership, newOwnership), ASPECT_RETRIEVER); + + assertEquals( + newOwnership.getOwnerTypes(), + new UrnArrayMap( + Map.of(DEFAULT_OWNERSHIP_TYPE_URN.toString(), new UrnArray(List.of(TEST_USER_A)))), + "Expected generic owners to be grouped by `none` ownership type."); + } + + @Test + public void ownershipTypeMutationMixedType() { + OwnerTypeMap testHook = new OwnerTypeMap(ASPECT_PLUGIN_CONFIG); + Ownership ownership = + buildOwnership( + Map.of( + TEST_USER_A, + List.of(), + TEST_GROUP_A, + List.of(), + TEST_USER_B, + List.of(BUS_OWNER), + TEST_GROUP_B, + List.of(TECH_OWNER))); + testHook.writeMutation(buildMCP(null, ownership), ASPECT_RETRIEVER); + + assertEquals( + ownership.getOwnerTypes(), + new UrnArrayMap( + Map.of( + DEFAULT_OWNERSHIP_TYPE_URN.toString(), + new UrnArray(List.of(TEST_USER_A, TEST_GROUP_A)), + BUS_OWNER.toString(), + new UrnArray(List.of(TEST_USER_B)), + TECH_OWNER.toString(), + new UrnArray(List.of(TEST_GROUP_B)))), + "Expected generic owners to be grouped by `none` ownership type as well as specified types."); + } + + @Test + public void ownershipTypeMutationMixedTypeAdd() { + OwnerTypeMap testHook = new OwnerTypeMap(ASPECT_PLUGIN_CONFIG); + Ownership oldOwnership = + buildOwnership(Map.of(TEST_USER_A, 
List.of(), TEST_USER_B, List.of(BUS_OWNER))); + Ownership newOwnership = + buildOwnership( + Map.of( + TEST_USER_A, + List.of(), + TEST_GROUP_A, + List.of(), + TEST_USER_B, + List.of(BUS_OWNER), + TEST_GROUP_B, + List.of(TECH_OWNER))); + testHook.writeMutation(buildMCP(oldOwnership, newOwnership), ASPECT_RETRIEVER); + + assertEquals( + newOwnership.getOwnerTypes(), + new UrnArrayMap( + Map.of( + DEFAULT_OWNERSHIP_TYPE_URN.toString(), + new UrnArray(List.of(TEST_USER_A, TEST_GROUP_A)), + BUS_OWNER.toString(), + new UrnArray(List.of(TEST_USER_B)), + TECH_OWNER.toString(), + new UrnArray(List.of(TEST_GROUP_B)))), + "Expected generic owners to be grouped by `none` ownership type as well as specified types."); + } + + @Test + public void ownershipTypeMutationMixedTypeRemove() { + OwnerTypeMap testHook = new OwnerTypeMap(ASPECT_PLUGIN_CONFIG); + Ownership oldOwnership = + buildOwnership( + Map.of( + TEST_USER_A, + List.of(), + TEST_GROUP_A, + List.of(), + TEST_USER_B, + List.of(BUS_OWNER), + TEST_GROUP_B, + List.of(TECH_OWNER))); + Ownership newOwnership = + buildOwnership(Map.of(TEST_GROUP_A, List.of(), TEST_GROUP_B, List.of(TECH_OWNER))); + testHook.writeMutation(buildMCP(oldOwnership, newOwnership), ASPECT_RETRIEVER); + + assertEquals( + newOwnership.getOwnerTypes(), + new UrnArrayMap( + Map.of( + DEFAULT_OWNERSHIP_TYPE_URN.toString(), + new UrnArray(List.of(TEST_GROUP_A)), + BUS_OWNER.toString(), + new UrnArray(), + TECH_OWNER.toString(), + new UrnArray(List.of(TEST_GROUP_B)))), + "Expected generic owners to be grouped by `none` ownership type as well as specified types."); + } + + private static Ownership buildOwnership(Map> ownershipTypes) { + Ownership ownership = new Ownership(); + ownership.setOwners( + ownershipTypes.entrySet().stream() + .flatMap( + entry -> { + if (entry.getValue().isEmpty()) { + Owner owner = new Owner(); + owner.setOwner(entry.getKey()); + return Stream.of(owner); + } else { + return entry.getValue().stream() + .map( + typeUrn -> { + Owner 
owner = new Owner(); + owner.setOwner(entry.getKey()); + owner.setTypeUrn(typeUrn); + return owner; + }); + } + }) + .collect(Collectors.toCollection(OwnerArray::new))); + return ownership; + } + + private static Set buildMCP(@Nullable Ownership oldOwnership, Ownership newOwnership) { + return TestMCP.ofOneMCP(TEST_ENTITY_URN, oldOwnership, newOwnership, ENTITY_REGISTRY); + } +} diff --git a/gradle.properties b/gradle.properties index f410ff01bf397..0b797d6a9ab20 100644 --- a/gradle.properties +++ b/gradle.properties @@ -3,8 +3,8 @@ org.gradle.configureondemand=true org.gradle.parallel=true org.gradle.caching=true -# Increase gradle JVM memory to 3GB to allow tests to run locally -org.gradle.jvmargs=-Xmx3000m +# Increase gradle JVM memory to 4GB to allow tests to run locally +org.gradle.jvmargs=-Xmx4000m # Increase retries to 5 (from default of 3) and increase interval from 125ms to 1s. # Based on this thread https://github.com/gradle/gradle/issues/4629, it's unclear # if we should be using systemProp or not. We're using both for now. 
diff --git a/ingestion-scheduler/src/main/java/com/datahub/metadata/ingestion/IngestionScheduler.java b/ingestion-scheduler/src/main/java/com/datahub/metadata/ingestion/IngestionScheduler.java index 3d3883b4af9df..1472fa6b19f82 100644 --- a/ingestion-scheduler/src/main/java/com/datahub/metadata/ingestion/IngestionScheduler.java +++ b/ingestion-scheduler/src/main/java/com/datahub/metadata/ingestion/IngestionScheduler.java @@ -1,6 +1,5 @@ package com.datahub.metadata.ingestion; -import com.datahub.authentication.Authentication; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -24,6 +23,7 @@ import com.linkedin.metadata.utils.IngestionUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; +import io.datahubproject.metadata.context.OperationContext; import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Collections; @@ -75,34 +75,34 @@ @RequiredArgsConstructor public class IngestionScheduler { - private final Authentication _systemAuthentication; - private final EntityClient _entityClient; + private final OperationContext systemOpContext; + private final EntityClient entityClient; // Maps a DataHubIngestionSource to a future representing the "next" scheduled execution of the // source // Visible for testing - final Map> _nextIngestionSourceExecutionCache = new HashMap<>(); + final Map> nextIngestionSourceExecutionCache = new HashMap<>(); // Shared executor service used for executing an ingestion source on a schedule - private final ScheduledExecutorService _sharedExecutorService = + private final ScheduledExecutorService scheduledExecutorService = Executors.newScheduledThreadPool(1); - private final IngestionConfiguration _ingestionConfiguration; - private final int _batchGetDelayIntervalSeconds; - private final int _batchGetRefreshIntervalSeconds; + private final IngestionConfiguration 
ingestionConfiguration; + private final int batchGetDelayIntervalSeconds; + private final int batchGetRefreshIntervalSeconds; public void init() { final BatchRefreshSchedulesRunnable batchRefreshSchedulesRunnable = new BatchRefreshSchedulesRunnable( - _systemAuthentication, - _entityClient, + systemOpContext, + entityClient, this::scheduleNextIngestionSourceExecution, this::unscheduleAll); // Schedule a recurring batch-reload task. - _sharedExecutorService.scheduleAtFixedRate( + scheduledExecutorService.scheduleAtFixedRate( batchRefreshSchedulesRunnable, - _batchGetDelayIntervalSeconds, - _batchGetRefreshIntervalSeconds, + batchGetDelayIntervalSeconds, + batchGetRefreshIntervalSeconds, TimeUnit.SECONDS); } @@ -110,10 +110,10 @@ public void init() { public void unscheduleNextIngestionSourceExecution(final Urn ingestionSourceUrn) { log.info("Unscheduling ingestion source with urn {}", ingestionSourceUrn); // Deleting an ingestion source schedule. Un-schedule the next execution. - ScheduledFuture future = _nextIngestionSourceExecutionCache.get(ingestionSourceUrn); + ScheduledFuture future = nextIngestionSourceExecutionCache.get(ingestionSourceUrn); if (future != null) { future.cancel(false); // Do not interrupt running processes - _nextIngestionSourceExecutionCache.remove(ingestionSourceUrn); + nextIngestionSourceExecutionCache.remove(ingestionSourceUrn); } } @@ -125,7 +125,7 @@ public void unscheduleAll() { // Deleting an ingestion source schedule. Un-schedule the next execution. Set scheduledSources = new HashSet<>( - _nextIngestionSourceExecutionCache.keySet()); // Create copy to avoid concurrent mod. + nextIngestionSourceExecutionCache.keySet()); // Create copy to avoid concurrent mod. for (Urn urn : scheduledSources) { unscheduleNextIngestionSourceExecution(urn); } @@ -173,19 +173,19 @@ public void scheduleNextIngestionSourceExecution( // Schedule the ingestion source to run some time in the future. 
final ExecutionRequestRunnable executionRequestRunnable = new ExecutionRequestRunnable( - _systemAuthentication, - _entityClient, - _ingestionConfiguration, + systemOpContext, + entityClient, + ingestionConfiguration, ingestionSourceUrn, newInfo, - () -> _nextIngestionSourceExecutionCache.remove(ingestionSourceUrn), + () -> nextIngestionSourceExecutionCache.remove(ingestionSourceUrn), this::scheduleNextIngestionSourceExecution); // Schedule the next ingestion run final ScheduledFuture scheduledFuture = - _sharedExecutorService.schedule( + scheduledExecutorService.schedule( executionRequestRunnable, scheduleTime, TimeUnit.MILLISECONDS); - _nextIngestionSourceExecutionCache.put(ingestionSourceUrn, scheduledFuture); + nextIngestionSourceExecutionCache.put(ingestionSourceUrn, scheduledFuture); log.info( String.format( @@ -216,22 +216,22 @@ public void scheduleNextIngestionSourceExecution( @VisibleForTesting static class BatchRefreshSchedulesRunnable implements Runnable { - private final Authentication _systemAuthentication; - private final EntityClient _entityClient; - private final BiConsumer _scheduleNextIngestionSourceExecution; - private final Runnable _unscheduleAll; + private final OperationContext systemOpContext; + private final EntityClient entityClient; + private final BiConsumer scheduleNextIngestionSourceExecution; + private final Runnable unscheduleAll; public BatchRefreshSchedulesRunnable( - @Nonnull final Authentication systemAuthentication, + @Nonnull final OperationContext systemOpContext, @Nonnull final EntityClient entityClient, @Nonnull final BiConsumer scheduleNextIngestionSourceExecution, @Nonnull final Runnable unscheduleAll) { - _systemAuthentication = Objects.requireNonNull(systemAuthentication); - _entityClient = Objects.requireNonNull(entityClient); - _scheduleNextIngestionSourceExecution = + this.systemOpContext = systemOpContext; + this.entityClient = Objects.requireNonNull(entityClient); + this.scheduleNextIngestionSourceExecution = 
Objects.requireNonNull(scheduleNextIngestionSourceExecution); - _unscheduleAll = unscheduleAll; + this.unscheduleAll = unscheduleAll; } @Override @@ -239,7 +239,7 @@ public void run() { try { // First un-schedule all currently scheduled runs (to make sure consistency is maintained) - _unscheduleAll.run(); + unscheduleAll.run(); int start = 0; int count = 30; @@ -254,20 +254,20 @@ public void run() { // 1. List all ingestion source urns. final ListResult ingestionSourceUrns = - _entityClient.list( + entityClient.list( + systemOpContext, Constants.INGESTION_SOURCE_ENTITY_NAME, Collections.emptyMap(), start, - count, - _systemAuthentication); + count); // 2. Fetch all ingestion sources, specifically the "info" aspect. final Map ingestionSources = - _entityClient.batchGetV2( + entityClient.batchGetV2( Constants.INGESTION_SOURCE_ENTITY_NAME, new HashSet<>(ingestionSourceUrns.getEntities()), ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME), - _systemAuthentication); + systemOpContext.getSessionAuthentication()); // 3. 
Reschedule ingestion sources based on the fetched schedules (inside "info") log.debug( @@ -310,7 +310,7 @@ private void scheduleNextIngestionRuns( new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); // Invoke the "scheduleNextIngestionSourceExecution" (passed from parent) - _scheduleNextIngestionSourceExecution.accept(entityUrn, ingestionSourceInfo); + scheduleNextIngestionSourceExecution.accept(entityUrn, ingestionSourceInfo); } } } @@ -330,23 +330,23 @@ static class ExecutionRequestRunnable implements Runnable { private static final String VERSION_ARGUMENT_NAME = "version"; private static final String DEBUG_MODE_ARG_NAME = "debug_mode"; - private final Authentication _systemAuthentication; - private final EntityClient _entityClient; - private final IngestionConfiguration _ingestionConfiguration; + private final OperationContext systemOpContext; + private final EntityClient entityClient; + private final IngestionConfiguration ingestionConfiguration; // Information about the ingestion source being executed - private final Urn _ingestionSourceUrn; - private final DataHubIngestionSourceInfo _ingestionSourceInfo; + private final Urn ingestionSourceUrn; + private final DataHubIngestionSourceInfo ingestionSourceInfo; // Used for clearing the "next execution" cache once a corresponding execution request has been // created. - private final Runnable _deleteNextIngestionSourceExecution; + private final Runnable deleteNextIngestionSourceExecution; // Used for re-scheduling the ingestion source once it has executed! 
- private final BiConsumer _scheduleNextIngestionSourceExecution; + private final BiConsumer scheduleNextIngestionSourceExecution; public ExecutionRequestRunnable( - @Nonnull final Authentication systemAuthentication, + @Nonnull final OperationContext systemOpContext, @Nonnull final EntityClient entityClient, @Nonnull final IngestionConfiguration ingestionConfiguration, @Nonnull final Urn ingestionSourceUrn, @@ -355,14 +355,14 @@ public ExecutionRequestRunnable( @Nonnull final BiConsumer scheduleNextIngestionSourceExecution) { - _systemAuthentication = Objects.requireNonNull(systemAuthentication); - _entityClient = Objects.requireNonNull(entityClient); - _ingestionConfiguration = Objects.requireNonNull(ingestionConfiguration); - _ingestionSourceUrn = Objects.requireNonNull(ingestionSourceUrn); - _ingestionSourceInfo = Objects.requireNonNull(ingestionSourceInfo); - _deleteNextIngestionSourceExecution = + this.systemOpContext = systemOpContext; + this.entityClient = Objects.requireNonNull(entityClient); + this.ingestionConfiguration = Objects.requireNonNull(ingestionConfiguration); + this.ingestionSourceUrn = Objects.requireNonNull(ingestionSourceUrn); + this.ingestionSourceInfo = Objects.requireNonNull(ingestionSourceInfo); + this.deleteNextIngestionSourceExecution = Objects.requireNonNull(deleteNextIngestionSourceExecution); - _scheduleNextIngestionSourceExecution = + this.scheduleNextIngestionSourceExecution = Objects.requireNonNull(scheduleNextIngestionSourceExecution); } @@ -371,14 +371,14 @@ public void run() { // Remove the next ingestion execution as we are going to execute it now. 
(no retry logic // currently) - _deleteNextIngestionSourceExecution.run(); + deleteNextIngestionSourceExecution.run(); try { log.info( String.format( "Creating Execution Request for scheduled Ingestion Source with urn %s", - _ingestionSourceUrn)); + ingestionSourceUrn)); // Create a new Execution Request Proposal final MetadataChangeProposal proposal = new MetadataChangeProposal(); @@ -395,23 +395,23 @@ public void run() { input.setSource( new ExecutionRequestSource() .setType(EXECUTION_REQUEST_SOURCE_NAME) - .setIngestionSource(_ingestionSourceUrn)); - input.setExecutorId(_ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); + .setIngestionSource(ingestionSourceUrn)); + input.setExecutorId(ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); input.setRequestedAt(System.currentTimeMillis()); Map arguments = new HashMap<>(); String recipe = IngestionUtils.injectPipelineName( - _ingestionSourceInfo.getConfig().getRecipe(), _ingestionSourceUrn.toString()); + ingestionSourceInfo.getConfig().getRecipe(), ingestionSourceUrn.toString()); arguments.put(RECIPE_ARGUMENT_NAME, recipe); arguments.put( VERSION_ARGUMENT_NAME, - _ingestionSourceInfo.getConfig().hasVersion() - ? _ingestionSourceInfo.getConfig().getVersion() - : _ingestionConfiguration.getDefaultCliVersion()); + ingestionSourceInfo.getConfig().hasVersion() + ? ingestionSourceInfo.getConfig().getVersion() + : ingestionConfiguration.getDefaultCliVersion()); String debugMode = "false"; - if (_ingestionSourceInfo.getConfig().hasDebugMode()) { - debugMode = _ingestionSourceInfo.getConfig().isDebugMode() ? "true" : "false"; + if (ingestionSourceInfo.getConfig().hasDebugMode()) { + debugMode = ingestionSourceInfo.getConfig().isDebugMode() ? 
"true" : "false"; } arguments.put(DEBUG_MODE_ARG_NAME, debugMode); input.setArgs(new StringMap(arguments)); @@ -421,18 +421,18 @@ public void run() { proposal.setAspect(GenericRecordUtils.serializeAspect(input)); proposal.setChangeType(ChangeType.UPSERT); - _entityClient.ingestProposal(proposal, _systemAuthentication); + entityClient.ingestProposal(proposal, systemOpContext.getSystemAuthentication().get()); } catch (Exception e) { // TODO: This type of thing should likely be proactively reported. log.error( String.format( "Caught exception while attempting to create Execution Request for Ingestion Source with urn %s. Will retry on next scheduled attempt.", - _ingestionSourceUrn), + ingestionSourceUrn), e); } // 2. Re-Schedule the next execution request. - _scheduleNextIngestionSourceExecution.accept(_ingestionSourceUrn, _ingestionSourceInfo); + scheduleNextIngestionSourceExecution.accept(ingestionSourceUrn, ingestionSourceInfo); } } diff --git a/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java b/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java index 8174afc20765f..f07a9df941a14 100644 --- a/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java +++ b/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java @@ -2,7 +2,6 @@ import static org.testng.Assert.*; -import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.common.UrnArray; @@ -18,6 +17,7 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.query.ListResult; +import io.datahubproject.metadata.context.OperationContext; import java.util.Collections; import java.util.concurrent.Future; import java.util.concurrent.ScheduledFuture; @@ -93,11 +93,11 @@ public void 
setupTest() throws Exception { // Set up mocks for ingestion source batch fetching Mockito.when( mockClient.list( + Mockito.any(), Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), Mockito.eq(Collections.emptyMap()), Mockito.eq(0), - Mockito.eq(30), - Mockito.any())) + Mockito.eq(30))) .thenReturn( new ListResult() .setCount(30) @@ -117,7 +117,7 @@ public void setupTest() throws Exception { _ingestionScheduler = new IngestionScheduler( - Mockito.mock(Authentication.class), + Mockito.mock(OperationContext.class), mockClient, Mockito.mock(IngestionConfiguration.class), 1, @@ -128,11 +128,11 @@ public void setupTest() throws Exception { @Test public void testInvokeUpdateExistingSchedule() throws Exception { - assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 1); + assertEquals(_ingestionScheduler.nextIngestionSourceExecutionCache.size(), 1); Urn ingestionSourceUrn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0"); Future beforeFuture = - _ingestionScheduler._nextIngestionSourceExecutionCache.get(ingestionSourceUrn); + _ingestionScheduler.nextIngestionSourceExecutionCache.get(ingestionSourceUrn); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); newInfo.setSchedule( @@ -149,9 +149,9 @@ public void testInvokeUpdateExistingSchedule() throws Exception { // Assert that the new source has been scheduled successfully. _ingestionScheduler.scheduleNextIngestionSourceExecution(ingestionSourceUrn, newInfo); - assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 1); + assertEquals(_ingestionScheduler.nextIngestionSourceExecutionCache.size(), 1); Future newFuture = - _ingestionScheduler._nextIngestionSourceExecutionCache.get(ingestionSourceUrn); + _ingestionScheduler.nextIngestionSourceExecutionCache.get(ingestionSourceUrn); // Ensure that there is an overwritten future. 
Assert.assertNotSame(beforeFuture, newFuture); @@ -159,7 +159,7 @@ public void testInvokeUpdateExistingSchedule() throws Exception { @Test public void testInvokeNewSchedule() throws Exception { - assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 1); + assertEquals(_ingestionScheduler.nextIngestionSourceExecutionCache.size(), 1); final Urn urn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:2"); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); @@ -177,12 +177,12 @@ public void testInvokeNewSchedule() throws Exception { // Assert that the new source has been scheduled successfully. _ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo); - assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 2); + assertEquals(_ingestionScheduler.nextIngestionSourceExecutionCache.size(), 2); } @Test public void testInvokeInvalidSchedule() throws Exception { - assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 1); + assertEquals(_ingestionScheduler.nextIngestionSourceExecutionCache.size(), 1); final Urn urn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:2"); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); @@ -201,12 +201,12 @@ public void testInvokeInvalidSchedule() throws Exception { // Assert that no changes have been made to next execution cache. 
_ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo); - assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 1); + assertEquals(_ingestionScheduler.nextIngestionSourceExecutionCache.size(), 1); } @Test public void testInvokeMissingSchedule() throws Exception { - assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 1); + assertEquals(_ingestionScheduler.nextIngestionSourceExecutionCache.size(), 1); final Urn urn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0"); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); @@ -221,27 +221,27 @@ public void testInvokeMissingSchedule() throws Exception { // Assert that the schedule has been removed. _ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo); - assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 0); + assertEquals(_ingestionScheduler.nextIngestionSourceExecutionCache.size(), 0); } @Test public void testInvokeDelete() throws Exception { - assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 1); + assertEquals(_ingestionScheduler.nextIngestionSourceExecutionCache.size(), 1); // Attempt to delete an unscheduled urn final Urn urn1 = Urn.createFromString("urn:li:dataHubIngestionSource:not-scheduled"); _ingestionScheduler.unscheduleNextIngestionSourceExecution(urn1); - assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 1); + assertEquals(_ingestionScheduler.nextIngestionSourceExecutionCache.size(), 1); // Attempt to delete a scheduled urn final Urn urn2 = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0"); _ingestionScheduler.unscheduleNextIngestionSourceExecution(urn2); - assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 0); + assertEquals(_ingestionScheduler.nextIngestionSourceExecutionCache.size(), 0); } @Test public void testSchedule() throws Exception { - 
assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 1); + assertEquals(_ingestionScheduler.nextIngestionSourceExecutionCache.size(), 1); final Urn urn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0"); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); @@ -259,7 +259,7 @@ public void testSchedule() throws Exception { _ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo); - ScheduledFuture future = _ingestionScheduler._nextIngestionSourceExecutionCache.get(urn); + ScheduledFuture future = _ingestionScheduler.nextIngestionSourceExecutionCache.get(urn); Assert.assertTrue( future.getDelay(TimeUnit.SECONDS) < 60); // Next execution must always be less than a minute away. @@ -267,7 +267,7 @@ public void testSchedule() throws Exception { @Test public void testUnscheduleAll() throws Exception { - assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 1); + assertEquals(_ingestionScheduler.nextIngestionSourceExecutionCache.size(), 1); final Urn urn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:3"); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); @@ -284,16 +284,16 @@ public void testUnscheduleAll() throws Exception { .setVersion("0.8.18")); _ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo); - assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 2); + assertEquals(_ingestionScheduler.nextIngestionSourceExecutionCache.size(), 2); // Get reference to schedules futures - ScheduledFuture future = _ingestionScheduler._nextIngestionSourceExecutionCache.get(urn); + ScheduledFuture future = _ingestionScheduler.nextIngestionSourceExecutionCache.get(urn); // Unschedule all _ingestionScheduler.unscheduleAll(); // Ensure that the cache is empty - Assert.assertTrue(_ingestionScheduler._nextIngestionSourceExecutionCache.isEmpty()); + 
Assert.assertTrue(_ingestionScheduler.nextIngestionSourceExecutionCache.isEmpty()); // And that the future is cancelled Assert.assertTrue(future.isCancelled()); diff --git a/li-utils/src/main/java/com/linkedin/metadata/Constants.java b/li-utils/src/main/java/com/linkedin/metadata/Constants.java index 94be2f288521c..6e08986595661 100644 --- a/li-utils/src/main/java/com/linkedin/metadata/Constants.java +++ b/li-utils/src/main/java/com/linkedin/metadata/Constants.java @@ -1,6 +1,7 @@ package com.linkedin.metadata; import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; /** Static class containing commonly-used constants across DataHub services. */ public class Constants { @@ -79,10 +80,13 @@ public class Constants { public static final String QUERY_ENTITY_NAME = "query"; public static final String DATA_PRODUCT_ENTITY_NAME = "dataProduct"; public static final String OWNERSHIP_TYPE_ENTITY_NAME = "ownershipType"; + public static final Urn DEFAULT_OWNERSHIP_TYPE_URN = + UrnUtils.getUrn("urn:li:ownershipType:__system__none"); public static final String STRUCTURED_PROPERTY_ENTITY_NAME = "structuredProperty"; public static final String DATA_TYPE_ENTITY_NAME = "dataType"; public static final String ENTITY_TYPE_ENTITY_NAME = "entityType"; public static final String FORM_ENTITY_NAME = "form"; + public static final String RESTRICTED_ENTITY_NAME = "restricted"; /** Aspects */ // Common diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java index bc3a3c9f385a6..adbfdbe3236fc 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java @@ -1,6 +1,6 @@ package com.datahub.authorization; -import java.util.List; +import java.util.Collection; /** * Represents a group of 
privileges that must ALL be required to authorize a request. @@ -8,13 +8,13 @@ *

That is, an AND of privileges. */ public class ConjunctivePrivilegeGroup { - private final List _requiredPrivileges; + private final Collection _requiredPrivileges; - public ConjunctivePrivilegeGroup(List requiredPrivileges) { + public ConjunctivePrivilegeGroup(Collection requiredPrivileges) { _requiredPrivileges = requiredPrivileges; } - public List getRequiredPrivileges() { + public Collection getRequiredPrivileges() { return _requiredPrivileges; } } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/config/SearchAuthorizationConfiguration.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/config/SearchAuthorizationConfiguration.java new file mode 100644 index 0000000000000..cb176130d2e78 --- /dev/null +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/config/SearchAuthorizationConfiguration.java @@ -0,0 +1,24 @@ +package com.datahub.authorization.config; + +import lombok.AccessLevel; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Builder(toBuilder = true) +@Data +@AllArgsConstructor(access = AccessLevel.PACKAGE) +@NoArgsConstructor(access = AccessLevel.PACKAGE) +public class SearchAuthorizationConfiguration { + private boolean enabled; + private SearchAuthorizationRecommendationsConfiguration recommendations; + + @Builder(toBuilder = true) + @Data + @AllArgsConstructor(access = AccessLevel.PACKAGE) + @NoArgsConstructor(access = AccessLevel.PACKAGE) + public static class SearchAuthorizationRecommendationsConfiguration { + private boolean peerGroupEnabled; + } +} diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authorization/Authorizer.java b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authorization/Authorizer.java index a6baf0b5b282c..3c113dcd2e052 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authorization/Authorizer.java +++ 
b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authorization/Authorizer.java @@ -6,8 +6,13 @@ import com.datahub.authorization.AuthorizerContext; import com.datahub.authorization.EntitySpec; import com.datahub.plugins.Plugin; +import com.linkedin.common.urn.Urn; +import com.linkedin.policy.DataHubPolicyInfo; +import java.util.Collection; +import java.util.Collections; import java.util.Map; import java.util.Optional; +import java.util.Set; import javax.annotation.Nonnull; /** @@ -15,22 +20,53 @@ * privilege. */ public interface Authorizer extends Plugin { + Authorizer EMPTY = new Authorizer() {}; + /** * Initialize the Authorizer. Invoked once at boot time. * * @param authorizerConfig config provided to the authenticator derived from the Metadata Service * YAML config. This config comes from the "authorization.authorizers.config" configuration. */ - void init( - @Nonnull final Map authorizerConfig, @Nonnull final AuthorizerContext ctx); + default void init( + @Nonnull final Map authorizerConfig, @Nonnull final AuthorizerContext ctx) {} /** Authorizes an action based on the actor, the resource, and required privileges. */ - AuthorizationResult authorize(@Nonnull final AuthorizationRequest request); + default AuthorizationResult authorize(@Nonnull final AuthorizationRequest request) { + return new AuthorizationResult(request, AuthorizationResult.Type.DENY, "Not Implemented."); + } /** * Retrieves the current list of actors authorized to for a particular privilege against an * optional resource */ - AuthorizedActors authorizedActors( - final String privilege, final Optional resourceSpec); + default AuthorizedActors authorizedActors( + final String privilege, final Optional resourceSpec) { + return AuthorizedActors.builder() + .privilege(privilege) + .users(Collections.emptyList()) + .roles(Collections.emptyList()) + .groups(Collections.emptyList()) + .build(); + } + + /** + * Given the actor's urn retrieve the policies. 
+ * + * @param actorUrn + * @return + */ + default Set getActorPolicies(@Nonnull Urn actorUrn) { + return Collections.emptySet(); + } + + /** Given the actor's urn retrieve the actor's groups */ + default Collection getActorGroups(@Nonnull Urn actorUrn) { + return Collections.emptyList(); + } + + /** Given an actor's urn retrieve the actor's peers */ + default Collection getActorPeers(@Nonnull Urn actorUrn) { + return Collections.emptyList(); + } } diff --git a/metadata-events/mxe-utils-avro/src/test/resources/test-avro2pegasus-mae.json b/metadata-events/mxe-utils-avro/src/test/resources/test-avro2pegasus-mae.json index bd4bd5878d3bb..0f107f699428f 100644 --- a/metadata-events/mxe-utils-avro/src/test/resources/test-avro2pegasus-mae.json +++ b/metadata-events/mxe-utils-avro/src/test/resources/test-avro2pegasus-mae.json @@ -16,6 +16,7 @@ "source": null } ], + "ownerTypes": null, "lastModified": { "time": 0, "actor": "urn:li:corpuser:foobar", diff --git a/metadata-events/mxe-utils-avro/src/test/resources/test-avro2pegasus-mce.json b/metadata-events/mxe-utils-avro/src/test/resources/test-avro2pegasus-mce.json index 8aba67eec4cee..850660e59a60e 100644 --- a/metadata-events/mxe-utils-avro/src/test/resources/test-avro2pegasus-mce.json +++ b/metadata-events/mxe-utils-avro/src/test/resources/test-avro2pegasus-mce.json @@ -14,6 +14,7 @@ "source": null } ], + "ownerTypes": null, "lastModified": { "time": 0, "actor": "urn:li:corpuser:foobar", diff --git a/metadata-ingestion/src/datahub/ingestion/source/csv_enricher.py b/metadata-ingestion/src/datahub/ingestion/source/csv_enricher.py index cc8710325a8f1..7a2dfa7ae0705 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/csv_enricher.py +++ b/metadata-ingestion/src/datahub/ingestion/source/csv_enricher.py @@ -226,7 +226,7 @@ def get_resource_owners_work_unit( if not current_ownership: # If we want to overwrite or there are no existing tags, create a new GlobalTags object - current_ownership = OwnershipClass(owners, 
get_audit_stamp()) + current_ownership = OwnershipClass(owners, lastModified=get_audit_stamp()) else: current_owner_urns: Set[str] = set( [owner.owner for owner in current_ownership.owners] diff --git a/metadata-ingestion/tests/unit/test_rest_sink.py b/metadata-ingestion/tests/unit/test_rest_sink.py index 82e02aced5a67..7bfa09a35951b 100644 --- a/metadata-ingestion/tests/unit/test_rest_sink.py +++ b/metadata-ingestion/tests/unit/test_rest_sink.py @@ -235,7 +235,7 @@ "changeType": "UPSERT", "aspectName": "ownership", "aspect": { - "value": '{"owners": [{"owner": "urn:li:corpuser:fbar", "type": "DATAOWNER"}], "lastModified": {"time": 0, "actor": "urn:li:corpuser:fbar"}}', + "value": '{"owners": [{"owner": "urn:li:corpuser:fbar", "type": "DATAOWNER"}], "ownerTypes": {}, "lastModified": {"time": 0, "actor": "urn:li:corpuser:fbar"}}', "contentType": "application/json", }, } diff --git a/metadata-io/build.gradle b/metadata-io/build.gradle index f96517d93fca6..e07a6ee55c21f 100644 --- a/metadata-io/build.gradle +++ b/metadata-io/build.gradle @@ -9,6 +9,7 @@ configurations { dependencies { implementation project(':entity-registry') + implementation project(':metadata-service:auth-config') api project(':metadata-utils') api project(':metadata-events:mxe-avro') api project(':metadata-events:mxe-registration') @@ -17,6 +18,7 @@ dependencies { api project(':metadata-service:restli-client') api project(':metadata-service:configuration') api project(':metadata-service:services') + api project(':metadata-operation-context') implementation spec.product.pegasus.data implementation spec.product.pegasus.generator diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/EntityClientAspectRetriever.java b/metadata-io/src/main/java/com/linkedin/metadata/client/EntityClientAspectRetriever.java index 0fcb765b340cf..6b3d1ad6e193c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/EntityClientAspectRetriever.java +++ 
b/metadata-io/src/main/java/com/linkedin/metadata/client/EntityClientAspectRetriever.java @@ -17,11 +17,12 @@ import lombok.RequiredArgsConstructor; import org.springframework.stereotype.Component; +@Getter @Builder @Component @RequiredArgsConstructor public class EntityClientAspectRetriever implements CachingAspectRetriever { - @Getter private final EntityRegistry entityRegistry; + private final EntityRegistry entityRegistry; private final SystemEntityClient entityClient; /** diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java index fed6379f92104..c261d7fefd411 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java @@ -36,7 +36,6 @@ import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.ListResult; import com.linkedin.metadata.query.ListUrnsResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.EntitySearchService; @@ -57,6 +56,7 @@ import com.linkedin.parseq.retry.backoff.BackoffPolicy; import com.linkedin.parseq.retry.backoff.ExponentialBackoff; import com.linkedin.r2.RemoteInvocationException; +import io.datahubproject.metadata.context.OperationContext; import io.opentelemetry.extension.annotations.WithSpan; import java.net.URISyntaxException; import java.time.Clock; @@ -84,6 +84,7 @@ public class JavaEntityClient implements EntityClient { private final Clock _clock = Clock.systemUTC(); + private final OperationContext opContext; private final EntityService entityService; private final DeleteEntityService deleteEntityService; private final EntitySearchService entitySearchService; @@ -165,15 +166,15 @@ public Map batchGet( */ @Nonnull public 
AutoCompleteResult autoComplete( + @Nonnull OperationContext opContext, @Nonnull String entityType, @Nonnull String query, @Nullable Filter requestFilters, @Nonnull int limit, - @Nullable String field, - @Nonnull final Authentication authentication) + @Nullable String field) throws RemoteInvocationException { return cachingEntitySearchService.autoComplete( - entityType, query, field, filterOrDefaultEmptyFilter(requestFilters), limit, null); + opContext, entityType, query, field, filterOrDefaultEmptyFilter(requestFilters), limit); } /** @@ -187,14 +188,14 @@ public AutoCompleteResult autoComplete( */ @Nonnull public AutoCompleteResult autoComplete( + @Nonnull OperationContext opContext, @Nonnull String entityType, @Nonnull String query, @Nullable Filter requestFilters, - @Nonnull int limit, - @Nonnull final Authentication authentication) + @Nonnull int limit) throws RemoteInvocationException { return cachingEntitySearchService.autoComplete( - entityType, query, "", filterOrDefaultEmptyFilter(requestFilters), limit, null); + opContext, entityType, query, "", filterOrDefaultEmptyFilter(requestFilters), limit); } /** @@ -207,18 +208,19 @@ public AutoCompleteResult autoComplete( * @param limit max number of datasets * @throws RemoteInvocationException */ + @Override @Nonnull public BrowseResult browse( + @Nonnull OperationContext opContext, @Nonnull String entityType, @Nonnull String path, @Nullable Map requestFilters, int start, - int limit, - @Nonnull final Authentication authentication) + int limit) throws RemoteInvocationException { return ValidationUtils.validateBrowseResult( cachingEntitySearchService.browse( - entityType, path, newFilter(requestFilters), start, limit, null), + opContext, entityType, path, newFilter(requestFilters), start, limit), entityService); } @@ -235,16 +237,15 @@ entityType, path, newFilter(requestFilters), start, limit, null), */ @Nonnull public BrowseResultV2 browseV2( + @Nonnull OperationContext opContext, @Nonnull String entityName, 
@Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, - int count, - @Nonnull Authentication authentication, - @Nullable SearchFlags searchFlags) { + int count) { // TODO: cache browseV2 results - return entitySearchService.browseV2(entityName, path, filter, input, start, count, searchFlags); + return entitySearchService.browseV2(opContext, entityName, path, filter, input, start, count); } /** @@ -260,17 +261,15 @@ public BrowseResultV2 browseV2( */ @Nonnull public BrowseResultV2 browseV2( + @Nonnull OperationContext opContext, @Nonnull List entityNames, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, - int count, - @Nonnull Authentication authentication, - @Nullable SearchFlags searchFlags) { + int count) { // TODO: cache browseV2 results - return entitySearchService.browseV2( - entityNames, path, filter, input, start, count, searchFlags); + return entitySearchService.browseV2(opContext, entityNames, path, filter, input, start, count); } @SneakyThrows @@ -324,7 +323,6 @@ public void batchUpdate( * @param requestFilters search filters * @param start start offset for search results * @param count max number of search results requested - * @param searchFlags * @return a set of search results * @throws RemoteInvocationException */ @@ -332,18 +330,17 @@ public void batchUpdate( @WithSpan @Override public SearchResult search( + @Nonnull OperationContext opContext, @Nonnull String entity, @Nonnull String input, @Nullable Map requestFilters, int start, - int count, - @Nonnull Authentication authentication, - @Nullable SearchFlags searchFlags) + int count) throws RemoteInvocationException { return ValidationUtils.validateSearchResult( entitySearchService.search( - List.of(entity), input, newFilter(requestFilters), null, start, count, searchFlags), + opContext, List.of(entity), input, newFilter(requestFilters), null, start, count), entityService); } @@ -358,18 +355,25 @@ public SearchResult search( * @return a set 
of list results * @throws RemoteInvocationException */ + @Override @Deprecated @Nonnull public ListResult list( + @Nonnull OperationContext opContext, @Nonnull String entity, @Nullable Map requestFilters, int start, - int count, - @Nonnull final Authentication authentication) + int count) throws RemoteInvocationException { return ValidationUtils.validateListResult( toListResult( - entitySearchService.filter(entity, newFilter(requestFilters), null, start, count)), + entitySearchService.filter( + opContext.withSearchFlags(flags -> flags.setFulltext(false)), + entity, + newFilter(requestFilters), + null, + start, + count)), entityService); } @@ -387,34 +391,33 @@ public ListResult list( @Nonnull @Override public SearchResult search( + @Nonnull OperationContext opContext, @Nonnull String entity, @Nonnull String input, @Nullable Filter filter, @Nullable SortCriterion sortCriterion, int start, - int count, - @Nonnull Authentication authentication, - @Nullable SearchFlags searchFlags) + int count) throws RemoteInvocationException { return ValidationUtils.validateSearchResult( entitySearchService.search( - List.of(entity), input, filter, sortCriterion, start, count, searchFlags), + opContext, List.of(entity), input, filter, sortCriterion, start, count), entityService); } + @Override @Nonnull public SearchResult searchAcrossEntities( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String input, @Nullable Filter filter, int start, int count, - @Nullable SearchFlags searchFlags, - @Nullable SortCriterion sortCriterion, - @Nonnull final Authentication authentication) + @Nullable SortCriterion sortCriterion) throws RemoteInvocationException { return searchAcrossEntities( - entities, input, filter, start, count, searchFlags, sortCriterion, authentication, null); + opContext, entities, input, filter, start, count, sortCriterion, null); } /** @@ -430,49 +433,61 @@ public SearchResult searchAcrossEntities( * @return Snapshot key * @throws 
RemoteInvocationException */ + @Override @Nonnull public SearchResult searchAcrossEntities( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String input, @Nullable Filter filter, int start, int count, - @Nullable SearchFlags searchFlags, @Nullable SortCriterion sortCriterion, - @Nonnull final Authentication authentication, @Nullable List facets) throws RemoteInvocationException { - final SearchFlags finalFlags = - searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true); + return ValidationUtils.validateSearchResult( searchService.searchAcrossEntities( - entities, input, filter, sortCriterion, start, count, finalFlags, facets), + opContext.withSearchFlags(flags -> flags.setFulltext(true)), + entities, + input, + filter, + sortCriterion, + start, + count, + facets), entityService); } @Nonnull @Override public ScrollResult scrollAcrossEntities( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String input, @Nullable Filter filter, @Nullable String scrollId, @Nullable String keepAlive, - int count, - @Nullable SearchFlags searchFlags, - @Nonnull Authentication authentication) + int count) throws RemoteInvocationException { - final SearchFlags finalFlags = - searchFlags != null ? 
searchFlags : new SearchFlags().setFulltext(true); + return ValidationUtils.validateScrollResult( searchService.scrollAcrossEntities( - entities, input, filter, null, scrollId, keepAlive, count, finalFlags), + opContext.withSearchFlags(flags -> flags.setFulltext(true)), + entities, + input, + filter, + null, + scrollId, + keepAlive, + count), entityService); } @Nonnull @Override public LineageSearchResult searchAcrossLineage( + @Nonnull OperationContext opContext, @Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, @Nonnull List entities, @@ -481,12 +496,11 @@ public LineageSearchResult searchAcrossLineage( @Nullable Filter filter, @Nullable SortCriterion sortCriterion, int start, - int count, - @Nullable SearchFlags searchFlags, - @Nonnull final Authentication authentication) + int count) throws RemoteInvocationException { return ValidationUtils.validateLineageSearchResult( lineageSearchService.searchAcrossLineage( + opContext, sourceUrn, direction, entities, @@ -497,14 +511,14 @@ public LineageSearchResult searchAcrossLineage( start, count, null, - null, - searchFlags), + null), entityService); } @Nonnull @Override public LineageSearchResult searchAcrossLineage( + @Nonnull OperationContext opContext, @Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, @Nonnull List entities, @@ -515,12 +529,11 @@ public LineageSearchResult searchAcrossLineage( int start, int count, @Nullable Long startTimeMillis, - @Nullable Long endTimeMillis, - @Nullable SearchFlags searchFlags, - @Nonnull final Authentication authentication) + @Nullable Long endTimeMillis) throws RemoteInvocationException { return ValidationUtils.validateLineageSearchResult( lineageSearchService.searchAcrossLineage( + opContext, sourceUrn, direction, entities, @@ -531,14 +544,14 @@ public LineageSearchResult searchAcrossLineage( start, count, startTimeMillis, - endTimeMillis, - searchFlags), + endTimeMillis), entityService); } @Nonnull @Override public LineageScrollResult scrollAcrossLineage( 
+ @Nonnull OperationContext opContext, @Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, @Nonnull List entities, @@ -550,14 +563,12 @@ public LineageScrollResult scrollAcrossLineage( @Nonnull String keepAlive, int count, @Nullable Long startTimeMillis, - @Nullable Long endTimeMillis, - @Nullable SearchFlags searchFlags, - @Nonnull final Authentication authentication) + @Nullable Long endTimeMillis) throws RemoteInvocationException { - final SearchFlags finalFlags = - searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true).setSkipCache(true); + return ValidationUtils.validateLineageScrollResult( lineageSearchService.scrollAcrossLineage( + opContext.withSearchFlags(flags -> flags.setFulltext(true).setSkipCache(true)), sourceUrn, direction, entities, @@ -569,8 +580,7 @@ public LineageScrollResult scrollAcrossLineage( keepAlive, count, startTimeMillis, - endTimeMillis, - finalFlags), + endTimeMillis), entityService); } @@ -581,35 +591,40 @@ public LineageScrollResult scrollAcrossLineage( * @return list of paths given urn * @throws RemoteInvocationException */ + @Override @Nonnull public StringArray getBrowsePaths(@Nonnull Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException { return new StringArray(entitySearchService.getBrowsePaths(urn.getEntityType(), urn)); } + @Override public void setWritable(boolean canWrite, @Nonnull final Authentication authentication) throws RemoteInvocationException { entityService.setWritable(canWrite); } + @Override @Nonnull public Map batchGetTotalEntityCount( - @Nonnull List entityNames, @Nonnull final Authentication authentication) + @Nonnull OperationContext opContext, @Nonnull List entityNames) throws RemoteInvocationException { - return searchService.docCountPerEntity(entityNames); + return searchService.docCountPerEntity(opContext, entityNames); } /** List all urns existing for a particular Entity type. 
*/ + @Override public ListUrnsResult listUrns( @Nonnull final String entityName, final int start, final int count, - @Nonnull final Authentication authentication) + @Nonnull Authentication authentication) throws RemoteInvocationException { return entityService.listUrns(entityName, start, count); } /** Hard delete an entity with a particular urn. */ + @Override public void deleteEntity(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException { entityService.deleteUrn(urn); @@ -624,15 +639,22 @@ public void deleteEntityReferences(@Nonnull Urn urn, @Nonnull Authentication aut @Nonnull @Override public SearchResult filter( + @Nonnull OperationContext opContext, @Nonnull String entity, @Nonnull Filter filter, @Nullable SortCriterion sortCriterion, int start, - int count, - @Nonnull final Authentication authentication) + int count) throws RemoteInvocationException { return ValidationUtils.validateSearchResult( - entitySearchService.filter(entity, filter, sortCriterion, start, count), entityService); + entitySearchService.filter( + opContext.withSearchFlags(flags -> flags.setFulltext(true)), + entity, + filter, + sortCriterion, + start, + count), + entityService); } @Override diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java index fa020903c34f0..1f0a43821a7f9 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java @@ -1,6 +1,5 @@ package com.linkedin.metadata.client; -import com.datahub.authentication.Authentication; import com.linkedin.entity.client.EntityClientCache; import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; @@ -13,17 +12,21 @@ import 
com.linkedin.metadata.search.client.CachingEntitySearchService; import com.linkedin.metadata.service.RollbackService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; -import javax.annotation.Nonnull; +import io.datahubproject.metadata.context.OperationContext; +import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; +import org.springframework.beans.factory.annotation.Qualifier; /** Java backed SystemEntityClient */ @Getter public class SystemJavaEntityClient extends JavaEntityClient implements SystemEntityClient { private final EntityClientCache entityClientCache; - private final Authentication systemAuthentication; + private final OperationContext systemOperationContext; + private final ConcurrentHashMap operationContextMap; public SystemJavaEntityClient( + @Qualifier("systemOperationContext") OperationContext systemOperationContext, EntityService entityService, DeleteEntityService deleteEntityService, EntitySearchService entitySearchService, @@ -33,9 +36,9 @@ public SystemJavaEntityClient( TimeseriesAspectService timeseriesAspectService, RollbackService rollbackService, EventProducer eventProducer, - @Nonnull Authentication systemAuthentication, EntityClientCacheConfig cacheConfig) { super( + systemOperationContext, entityService, deleteEntityService, entitySearchService, @@ -45,8 +48,8 @@ public SystemJavaEntityClient( timeseriesAspectService, rollbackService, eventProducer); - this.systemAuthentication = systemAuthentication; - this.entityClientCache = - buildEntityClientCache(SystemJavaEntityClient.class, systemAuthentication, cacheConfig); + this.operationContextMap = new ConcurrentHashMap<>(); + this.systemOperationContext = systemOperationContext; + this.entityClientCache = buildEntityClientCache(SystemJavaEntityClient.class, cacheConfig); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java 
b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java index 3edb55f265dc1..aa60e7e528673 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java @@ -63,6 +63,9 @@ public Pair>, List> toUpsertBatchItems( upsertItem = patchBatchItem.applyPatch(currentValue, aspectRetriever); } + // Populate old aspect for write hooks + upsertItem.setPreviousSystemAspect(latest); + return upsertItem; }) .collect(Collectors.toCollection(LinkedList::new)); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java index f5c783014caa1..59109c8c3de64 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java @@ -1,9 +1,9 @@ package com.linkedin.metadata.recommendation.candidatesource; import com.codahale.metrics.Timer; +import com.datahub.authorization.config.SearchAuthorizationConfiguration; import com.datahub.util.exception.ESQueryException; import com.google.common.collect.ImmutableSet; -import com.linkedin.common.urn.Urn; import com.linkedin.metadata.Constants; import com.linkedin.metadata.datahubusage.DataHubUsageEventConstants; import com.linkedin.metadata.datahubusage.DataHubUsageEventType; @@ -15,9 +15,11 @@ import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.metrics.MetricUtils; +import io.datahubproject.metadata.context.OperationContext; import io.opentelemetry.extension.annotations.WithSpan; import java.io.IOException; import java.util.List; +import java.util.Optional; import 
java.util.Set; import java.util.stream.Collectors; import javax.annotation.Nonnull; @@ -29,6 +31,7 @@ import org.opensearch.client.RestHighLevelClient; import org.opensearch.client.indices.GetIndexRequest; import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.search.aggregations.AggregationBuilders; @@ -78,7 +81,7 @@ public RecommendationRenderType getRenderType() { @Override public boolean isEligible( - @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + @Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) { boolean analyticsEnabled = false; try { analyticsEnabled = @@ -96,8 +99,8 @@ public boolean isEligible( @Override @WithSpan public List getRecommendations( - @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { - SearchRequest searchRequest = buildSearchRequest(userUrn); + @Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) { + SearchRequest searchRequest = buildSearchRequest(opContext); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getMostPopular").time()) { final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT); @@ -121,11 +124,15 @@ public Set getSupportedEntityTypes() { return SUPPORTED_ENTITY_TYPES; } - private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { + private SearchRequest buildSearchRequest(@Nonnull OperationContext opContext) { // TODO: Proactively filter for entity types in the supported set. 
SearchRequest request = new SearchRequest(); SearchSourceBuilder source = new SearchSourceBuilder(); BoolQueryBuilder query = QueryBuilders.boolQuery(); + + // Potentially limit actors + restrictPeers(opContext).ifPresent(query::must); + // Filter for all entity view events query.must( QueryBuilders.termQuery( @@ -144,4 +151,23 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { request.indices(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)); return request; } + + // If search access controls enabled, restrict user activity to peers + private static Optional restrictPeers(@Nonnull OperationContext opContext) { + SearchAuthorizationConfiguration config = + opContext.getOperationContextConfig().getSearchAuthorizationConfiguration(); + + if (config.isEnabled() + && config.getRecommendations().isPeerGroupEnabled() + && !opContext.isSystemAuth()) { + return Optional.of( + QueryBuilders.termsQuery( + DataHubUsageEventConstants.ACTOR_URN + ".keyword", + opContext.getActorPeers().stream() + .map(Object::toString) + .collect(Collectors.toList()))); + } + + return Optional.empty(); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java index 127b0f5c342c7..a2ba8cbcbcc61 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java @@ -15,6 +15,7 @@ import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.metrics.MetricUtils; +import io.datahubproject.metadata.context.OperationContext; import io.opentelemetry.extension.annotations.WithSpan; import java.io.IOException; import java.util.List; @@ -79,7 +80,7 @@ public 
RecommendationRenderType getRenderType() { @Override public boolean isEligible( - @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + @Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) { boolean analyticsEnabled = false; try { analyticsEnabled = @@ -97,8 +98,8 @@ public boolean isEligible( @Override @WithSpan public List getRecommendations( - @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { - SearchRequest searchRequest = buildSearchRequest(userUrn); + @Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) { + SearchRequest searchRequest = buildSearchRequest(opContext.getActorContext().getActorUrn()); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getRecentlyEdited").time()) { final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT); @@ -127,6 +128,11 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { SearchRequest request = new SearchRequest(); SearchSourceBuilder source = new SearchSourceBuilder(); BoolQueryBuilder query = QueryBuilders.boolQuery(); + // Filter for the entity edit events of the user requesting recommendation + query.must( + QueryBuilders.termQuery( + ESUtils.toKeywordField(DataHubUsageEventConstants.ACTOR_URN, false), + userUrn.toString())); // Filter for the entity action events query.must( QueryBuilders.termQuery( diff --git a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java index 0ab5cf40cf4e5..d6bf7d94aecb6 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java @@ -15,6 +15,7 @@ import 
com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.metrics.MetricUtils; +import io.datahubproject.metadata.context.OperationContext; import io.opentelemetry.extension.annotations.WithSpan; import java.io.IOException; import java.util.List; @@ -79,7 +80,7 @@ public RecommendationRenderType getRenderType() { @Override public boolean isEligible( - @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + @Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) { boolean analyticsEnabled = false; try { analyticsEnabled = @@ -97,8 +98,8 @@ public boolean isEligible( @Override @WithSpan public List getRecommendations( - @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { - SearchRequest searchRequest = buildSearchRequest(userUrn); + @Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) { + SearchRequest searchRequest = buildSearchRequest(opContext.getActorContext().getActorUrn()); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getRecentlyViewed").time()) { final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/EntityLineageResultCacheKey.java b/metadata-io/src/main/java/com/linkedin/metadata/search/EntityLineageResultCacheKey.java index b862de320db36..3df3ad3945f59 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/EntityLineageResultCacheKey.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/EntityLineageResultCacheKey.java @@ -4,10 +4,12 @@ import com.linkedin.metadata.graph.LineageDirection; import java.time.Instant; import java.time.temporal.TemporalUnit; +import javax.annotation.Nonnull; import lombok.Data; @Data public class EntityLineageResultCacheKey { + private final String contextId; private 
final Urn sourceUrn; private final LineageDirection direction; private final Long startTimeMillis; @@ -15,13 +17,14 @@ public class EntityLineageResultCacheKey { private final Integer maxHops; public EntityLineageResultCacheKey( + @Nonnull String contextId, Urn sourceUrn, LineageDirection direction, Long startTimeMillis, Long endTimeMillis, Integer maxHops, TemporalUnit resolution) { - + this.contextId = contextId; this.sourceUrn = sourceUrn; this.direction = direction; this.maxHops = maxHops; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java index cf9279414a394..622f92b0bd7e9 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java @@ -32,6 +32,7 @@ import com.linkedin.metadata.search.utils.FilterUtils; import com.linkedin.metadata.search.utils.QueryUtils; import com.linkedin.metadata.search.utils.SearchUtils; +import io.datahubproject.metadata.context.OperationContext; import io.opentelemetry.extension.annotations.WithSpan; import java.net.URISyntaxException; import java.time.temporal.ChronoUnit; @@ -66,6 +67,7 @@ public class LineageSearchService { .setSkipCache(false) .setSkipAggregates(false) .setSkipHighlighting(true) + .setIncludeRestricted(false) .setGroupingSpec( new GroupingSpec() .setGroupingCriteria( @@ -79,7 +81,6 @@ public class LineageSearchService { @Nullable private final Cache cache; private final boolean cacheEnabled; private final SearchLineageCacheConfiguration cacheConfiguration; - private final ExecutorService cacheRefillExecutor = Executors.newFixedThreadPool(1); private static final String DEGREE_FILTER = "degree"; @@ -125,6 +126,7 @@ public class LineageSearchService { @Nonnull @WithSpan public LineageSearchResult searchAcrossLineage( + @Nonnull OperationContext opContext, @Nonnull Urn 
sourceUrn, @Nonnull LineageDirection direction, @Nonnull List entities, @@ -135,25 +137,30 @@ public LineageSearchResult searchAcrossLineage( int from, int size, @Nullable Long startTimeMillis, - @Nullable Long endTimeMillis, - @Nullable SearchFlags searchFlags) { - - final SearchFlags finalFlags = - applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); + @Nullable Long endTimeMillis) { long startTime = System.nanoTime(); - log.debug("Cache enabled {}, Input :{}:", cacheEnabled, input); - if ((input == null) || (input.isEmpty())) { - input = "*"; - } + final String finalInput = input == null || input.isEmpty() ? "*" : input; + + log.debug("Cache enabled {}, Input :{}:", cacheEnabled, finalInput); if (maxHops == null) { maxHops = 1000; } + final OperationContext finalOpContext = + opContext.withSearchFlags( + flags -> applyDefaultSearchFlags(flags, finalInput, DEFAULT_SERVICE_SEARCH_FLAGS)); + // Cache multihop result for faster performance final EntityLineageResultCacheKey cacheKey = new EntityLineageResultCacheKey( - sourceUrn, direction, startTimeMillis, endTimeMillis, maxHops, ChronoUnit.DAYS); + finalOpContext.getSearchContextId(), + sourceUrn, + direction, + startTimeMillis, + endTimeMillis, + maxHops, + ChronoUnit.DAYS); CachedEntityLineageResult cachedLineageResult = null; if (cacheEnabled) { @@ -166,7 +173,8 @@ public LineageSearchResult searchAcrossLineage( EntityLineageResult lineageResult; FreshnessStats freshnessStats = new FreshnessStats().setCached(Boolean.FALSE); - if (cachedLineageResult == null || finalFlags.isSkipCache()) { + if (cachedLineageResult == null + || finalOpContext.getSearchContext().getSearchFlags().isSkipCache()) { lineageResult = _graphService.getLineage( sourceUrn, direction, 0, MAX_RELATIONSHIPS, maxHops, startTimeMillis, endTimeMillis); @@ -218,7 +226,8 @@ public LineageSearchResult searchAcrossLineage( } } - if (SearchUtils.convertSchemaFieldToDataset(searchFlags)) { + if 
(SearchUtils.convertSchemaFieldToDataset( + finalOpContext.getSearchContext().getSearchFlags())) { // set schemaField relationship entity to be its reference urn LineageRelationshipArray updatedRelationships = convertSchemaFieldRelationships(lineageResult); @@ -243,7 +252,7 @@ public LineageSearchResult searchAcrossLineage( SearchUtils.removeCriteria( inputFilters, criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT)); - if (canDoLightning(lineageRelationships, input, reducedFilters, sortCriterion)) { + if (canDoLightning(lineageRelationships, finalInput, reducedFilters, sortCriterion)) { codePath = "lightning"; // use lightning approach to return lineage search results LineageSearchResult lineageSearchResult = @@ -260,7 +269,13 @@ public LineageSearchResult searchAcrossLineage( codePath = "tortoise"; LineageSearchResult lineageSearchResult = getSearchResultInBatches( - lineageRelationships, input, reducedFilters, sortCriterion, from, size, finalFlags); + finalOpContext, + lineageRelationships, + finalInput, + reducedFilters, + sortCriterion, + from, + size); if (!lineageSearchResult.getEntities().isEmpty()) { log.debug( "Lineage entity results number -> {}; first -> {}", @@ -311,7 +326,7 @@ LineageSearchResult getLightningSearchResult( int size, Set entityNames) { - // Contruct result objects + // Construct result objects LineageSearchResult finalResult = new LineageSearchResult().setMetadata(new SearchResultMetadata()); LineageSearchEntityArray lineageSearchEntityArray = new LineageSearchEntityArray(); @@ -506,16 +521,13 @@ private Map generateUrnToRelationshipMap( // Search service can only take up to 50K term filter, so query search service in batches private LineageSearchResult getSearchResultInBatches( + @Nonnull OperationContext opContext, List lineageRelationships, @Nonnull String input, @Nullable Filter inputFilters, @Nullable SortCriterion sortCriterion, int from, - int size, - @Nonnull SearchFlags searchFlags) { - - final SearchFlags finalFlags 
= - applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); + int size) { LineageSearchResult finalResult = new LineageSearchResult() @@ -540,13 +552,14 @@ private LineageSearchResult getSearchResultInBatches( LineageSearchResult resultForBatch = buildLineageSearchResult( _searchService.searchAcrossEntities( + opContext.withSearchFlags( + flags -> applyDefaultSearchFlags(flags, input, DEFAULT_SERVICE_SEARCH_FLAGS)), entitiesToQuery, input, finalFilter, sortCriterion, queryFrom, - querySize, - finalFlags), + querySize), urnToRelationship); queryFrom = Math.max(0, from - resultForBatch.getNumEntities()); querySize = Math.max(0, size - resultForBatch.getEntities().size()); @@ -717,6 +730,7 @@ private LineageSearchEntity buildLineageSearchEntity( @Nonnull @WithSpan public LineageScrollResult scrollAcrossLineage( + @Nonnull OperationContext opContext, @Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, @Nonnull List entities, @@ -728,12 +742,17 @@ public LineageScrollResult scrollAcrossLineage( @Nonnull String keepAlive, int size, @Nullable Long startTimeMillis, - @Nullable Long endTimeMillis, - @Nonnull SearchFlags searchFlags) { + @Nullable Long endTimeMillis) { // Cache multihop result for faster performance final EntityLineageResultCacheKey cacheKey = new EntityLineageResultCacheKey( - sourceUrn, direction, startTimeMillis, endTimeMillis, maxHops, ChronoUnit.DAYS); + opContext.getSearchContextId(), + sourceUrn, + direction, + startTimeMillis, + endTimeMillis, + maxHops, + ChronoUnit.DAYS); CachedEntityLineageResult cachedLineageResult = cacheEnabled ? cache.get(cacheKey, CachedEntityLineageResult.class) : null; EntityLineageResult lineageResult; @@ -767,28 +786,31 @@ public LineageScrollResult scrollAcrossLineage( SearchUtils.removeCriteria( inputFilters, criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT)); return getScrollResultInBatches( + opContext, lineageRelationships, input != null ? 
input : "*", reducedFilters, sortCriterion, scrollId, keepAlive, - size, - searchFlags); + size); } // Search service can only take up to 50K term filter, so query search service in batches private LineageScrollResult getScrollResultInBatches( + @Nonnull OperationContext opContext, List lineageRelationships, @Nonnull String input, @Nullable Filter inputFilters, @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nonnull String keepAlive, - int size, - @Nonnull SearchFlags searchFlags) { - final SearchFlags finalFlags = - applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); + int size) { + + OperationContext finalOpContext = + opContext.withSearchFlags( + flags -> applyDefaultSearchFlags(flags, input, DEFAULT_SERVICE_SEARCH_FLAGS)); + LineageScrollResult finalResult = new LineageScrollResult() .setEntities(new LineageSearchEntityArray(Collections.emptyList())) @@ -810,14 +832,14 @@ private LineageScrollResult getScrollResultInBatches( LineageScrollResult resultForBatch = buildLineageScrollResult( _searchService.scrollAcrossEntities( + finalOpContext, entitiesToQuery, input, finalFilter, sortCriterion, scrollId, keepAlive, - querySize, - finalFlags), + querySize), urnToRelationship); querySize = Math.max(0, size - resultForBatch.getEntities().size()); finalResult = mergeScrollResult(finalResult, resultForBatch); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java index 3bcc163613c5e..6e5bd63103190 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java @@ -4,7 +4,6 @@ import com.codahale.metrics.Timer; import com.linkedin.data.template.LongMap; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import 
com.linkedin.metadata.search.cache.EntityDocCountCache; @@ -12,6 +11,7 @@ import com.linkedin.metadata.search.ranker.SearchRanker; import com.linkedin.metadata.utils.SearchUtil; import com.linkedin.metadata.utils.metrics.MetricUtils; +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -38,14 +38,15 @@ public SearchService( _entityDocCountCache = entityDocCountCache; } - public Map docCountPerEntity(@Nonnull List entityNames) { + public Map docCountPerEntity( + @Nonnull OperationContext opContext, @Nonnull List entityNames) { return entityNames.stream() .collect( Collectors.toMap( Function.identity(), entityName -> _entityDocCountCache - .getEntityDocCount() + .getEntityDocCount(opContext) .getOrDefault(entityName.toLowerCase(), 0L))); } @@ -60,27 +61,26 @@ public Map docCountPerEntity(@Nonnull List entityNames) { * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return - * @param searchFlags optional set of flags to control search behavior * @return a {@link SearchResult} that contains a list of matched documents and related search * result metadata */ @Nonnull public SearchResult search( + @Nonnull OperationContext opContext, @Nonnull List entityNames, @Nonnull String input, @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, int from, - int size, - @Nullable SearchFlags searchFlags) { - List entitiesToSearch = getEntitiesToSearch(entityNames); + int size) { + List entitiesToSearch = getEntitiesToSearch(opContext, entityNames); if (entitiesToSearch.isEmpty()) { // Optimization: If the indices are all empty, return empty result return getEmptySearchResult(from, size); } SearchResult result = _cachingEntitySearchService.search( - entitiesToSearch, input, postFilters, sortCriterion, from, size, searchFlags, null); + opContext, entitiesToSearch, 
input, postFilters, sortCriterion, from, size, null); try { return result @@ -94,15 +94,15 @@ public SearchResult search( @Nonnull public SearchResult searchAcrossEntities( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, int from, - int size, - @Nullable SearchFlags searchFlags) { + int size) { return searchAcrossEntities( - entities, input, postFilters, sortCriterion, from, size, searchFlags, null); + opContext, entities, input, postFilters, sortCriterion, from, size, null); } /** @@ -116,20 +116,19 @@ public SearchResult searchAcrossEntities( * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return - * @param searchFlags optional set of flags to control search behavior * @param facets list of facets we want aggregations for * @return a {@link SearchResult} that contains a list of matched documents and related search * result metadata */ @Nonnull public SearchResult searchAcrossEntities( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, int from, int size, - @Nullable SearchFlags searchFlags, @Nullable List facets) { log.debug( String.format( @@ -147,14 +146,14 @@ public SearchResult searchAcrossEntities( facets = new ArrayList<>(facets); facets.add(INDEX_VIRTUAL_FIELD); } - List nonEmptyEntities = getEntitiesToSearch(entities); + List nonEmptyEntities = getEntitiesToSearch(opContext, entities); if (nonEmptyEntities.isEmpty()) { // Optimization: If the indices are all empty, return empty result return getEmptySearchResult(from, size); } SearchResult result = _cachingEntitySearchService.search( - nonEmptyEntities, input, postFilters, sortCriterion, from, size, searchFlags, facets); + opContext, nonEmptyEntities, input, postFilters, 
sortCriterion, from, size, facets); if (facets == null || facets.contains("entity") || facets.contains("_entityType")) { Optional entityTypeAgg = result.getMetadata().getAggregations().stream() @@ -206,7 +205,8 @@ public SearchResult searchAcrossEntities( * @param inputEntities the requested entities * @return some entities to search */ - private List getEntitiesToSearch(@Nonnull List inputEntities) { + private List getEntitiesToSearch( + @Nonnull OperationContext opContext, @Nonnull List inputEntities) { List nonEmptyEntities; List lowercaseEntities = inputEntities.stream().map(String::toLowerCase).collect(Collectors.toList()); @@ -214,7 +214,7 @@ private List getEntitiesToSearch(@Nonnull List inputEntities) { if (lowercaseEntities.isEmpty()) { try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getNonEmptyEntities").time()) { - nonEmptyEntities = _entityDocCountCache.getNonEmptyEntities(); + nonEmptyEntities = _entityDocCountCache.getNonEmptyEntities(opContext); } } else { nonEmptyEntities = lowercaseEntities; @@ -234,38 +234,30 @@ private List getEntitiesToSearch(@Nonnull List inputEntities) { * @param sortCriterion {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll identifier for passing to search backend * @param size the number of search hits to return - * @param searchFlags optional set of flags to control search behavior * @return a {@link ScrollResult} that contains a list of matched documents and related search * result metadata */ @Nonnull public ScrollResult scrollAcrossEntities( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, - int size, - @Nullable SearchFlags searchFlags) { + int size) { log.debug( String.format( "Searching Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", entities, input, 
postFilters, sortCriterion, scrollId, size)); - List entitiesToSearch = getEntitiesToSearch(entities); + List entitiesToSearch = getEntitiesToSearch(opContext, entities); if (entitiesToSearch.isEmpty()) { // No indices with non-zero entries: skip querying and return empty result return getEmptyScrollResult(size); } return _cachingEntitySearchService.scroll( - entitiesToSearch, - input, - postFilters, - sortCriterion, - scrollId, - keepAlive, - size, - searchFlags); + opContext, entitiesToSearch, input, postFilters, sortCriterion, scrollId, keepAlive, size); } private static SearchResult getEmptySearchResult(int from, int size) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CacheableSearcher.java b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CacheableSearcher.java index 0ecdb83ed20ee..28efa29c9fffa 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CacheableSearcher.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CacheableSearcher.java @@ -3,17 +3,16 @@ import static com.datahub.util.RecordUtils.*; import com.codahale.metrics.Timer; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.utils.metrics.MetricUtils; +import io.datahubproject.metadata.context.OperationContext; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import java.util.function.Function; import javax.annotation.Nonnull; -import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.Value; import org.springframework.cache.Cache; @@ -28,7 +27,6 @@ public class CacheableSearcher { private final Function searcher; // Function that generates the cache key given the query batch (from, size) private final Function cacheKeyGenerator; - @Nullable private final SearchFlags searchFlags; 
private final boolean enableCache; @Value @@ -43,7 +41,7 @@ public static class QueryPagination implements Serializable { * that return a variable number of results (we have no idea which batch the "from" "size" page * corresponds to) */ - public SearchResult getSearchResults(int from, int size) { + public SearchResult getSearchResults(@Nonnull OperationContext opContext, int from, int size) { try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getSearchResults").time()) { int resultsSoFar = 0; int batchId = 0; @@ -52,7 +50,7 @@ public SearchResult getSearchResults(int from, int size) { SearchResult batchedResult; // Use do-while to make sure we run at least one batch to fetch metadata do { - batchedResult = getBatch(batchId); + batchedResult = getBatch(opContext, batchId); int currentBatchSize = batchedResult.getEntities().size(); // If the number of results in this batch is 0, no need to continue if (currentBatchSize == 0) { @@ -85,13 +83,14 @@ private QueryPagination getBatchQuerySize(int batchId) { return new QueryPagination(batchId * batchSize, batchSize); } - private SearchResult getBatch(int batchId) { + private SearchResult getBatch(@Nonnull OperationContext opContext, int batchId) { try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getBatch").time()) { QueryPagination batch = getBatchQuerySize(batchId); SearchResult result; if (enableCache) { K cacheKey = cacheKeyGenerator.apply(batch); - if ((searchFlags == null || !searchFlags.isSkipCache())) { + if ((opContext.getSearchContext().getSearchFlags().isSkipCache() == null + || !opContext.getSearchContext().getSearchFlags().isSkipCache())) { try (Timer.Context ignored2 = MetricUtils.timer(this.getClass(), "getBatch_cache").time()) { Timer.Context cacheAccess = diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/EntityDocCountCache.java b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/EntityDocCountCache.java index 2c99c71acf749..745ef6686d320 
100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/EntityDocCountCache.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/EntityDocCountCache.java @@ -5,46 +5,56 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.utils.ConcurrencyUtils; +import io.datahubproject.metadata.context.OperationContext; import io.opentelemetry.extension.annotations.WithSpan; import java.util.List; import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class EntityDocCountCache { - private final EntityRegistry _entityRegistry; - private final EntitySearchService _entitySearchService; - private final Supplier> entityDocCount; + private final EntityRegistry entityRegistry; + private final EntitySearchService entitySearchService; + private final EntityDocCountCacheConfiguration config; + private final Map>> entityDocCounts; public EntityDocCountCache( EntityRegistry entityRegistry, EntitySearchService entitySearchService, EntityDocCountCacheConfiguration config) { - _entityRegistry = entityRegistry; - _entitySearchService = entitySearchService; - entityDocCount = - Suppliers.memoizeWithExpiration( - this::fetchEntityDocCount, config.getTtlSeconds(), TimeUnit.SECONDS); + this.config = config; + this.entityRegistry = entityRegistry; + this.entitySearchService = entitySearchService; + this.entityDocCounts = new ConcurrentHashMap<>(); } - private Map fetchEntityDocCount() { + private Map fetchEntityDocCount(@Nonnull OperationContext opContext) { return ConcurrencyUtils.transformAndCollectAsync( - _entityRegistry.getEntitySpecs().keySet(), + entityRegistry.getEntitySpecs().keySet(), Function.identity(), - Collectors.toMap(Function.identity(), 
_entitySearchService::docCount)); + Collectors.toMap(Function.identity(), v -> entitySearchService.docCount(opContext, v))); } @WithSpan - public Map getEntityDocCount() { - return entityDocCount.get(); + public Map getEntityDocCount(@Nonnull OperationContext opContext) { + return entityDocCounts + .computeIfAbsent(opContext.getSearchContextId(), k -> buildSupplier(opContext)) + .get(); } - public List getNonEmptyEntities() { - return getEntityDocCount().entrySet().stream() + public List getNonEmptyEntities(@Nonnull OperationContext opContext) { + return getEntityDocCount(opContext).entrySet().stream() .filter(entry -> entry.getValue() > 0) .map(Map.Entry::getKey) .collect(Collectors.toList()); } + + private Supplier> buildSupplier(@Nonnull OperationContext opContext) { + return Suppliers.memoizeWithExpiration( + () -> fetchEntityDocCount(opContext), config.getTtlSeconds(), TimeUnit.SECONDS); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java index eaeae0cfc1556..5db427fa90148 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java @@ -14,6 +14,7 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.cache.CacheableSearcher; import com.linkedin.metadata.utils.metrics.MetricUtils; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; import java.util.Optional; import javax.annotation.Nonnull; @@ -42,52 +43,53 @@ public class CachingEntitySearchService { * Retrieves cached search results. If the query has been cached, this will return quickly. If * not, a full search request will be made. 
* - * @param entityName the name of the entity to search + * @param opContext the operation's context + * @param entityNames the names of the entity to search * @param query the search query * @param filters the filters to include * @param sortCriterion the sort criterion * @param from the start offset * @param size the count - * @param flags additional search flags * @param facets list of facets we want aggregations for * @return a {@link SearchResult} containing the requested batch of search results */ public SearchResult search( + @Nonnull OperationContext opContext, @Nonnull List entityNames, @Nonnull String query, @Nullable Filter filters, @Nullable SortCriterion sortCriterion, int from, int size, - @Nullable SearchFlags flags, @Nullable List facets) { return getCachedSearchResults( - entityNames, query, filters, sortCriterion, from, size, flags, facets); + opContext, entityNames, query, filters, sortCriterion, from, size, facets); } /** * Retrieves cached auto complete results * + * @param opContext the operation's context * @param entityName the name of the entity to search * @param input the input query * @param filters the filters to include * @param limit the max number of results to return - * @param flags additional search flags * @return a {@link SearchResult} containing the requested batch of search results */ public AutoCompleteResult autoComplete( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nonnull String input, @Nullable String field, @Nullable Filter filters, - int limit, - @Nullable SearchFlags flags) { - return getCachedAutoCompleteResults(entityName, input, field, filters, limit, flags); + int limit) { + return getCachedAutoCompleteResults(opContext, entityName, input, field, filters, limit); } /** * Retrieves cached auto complete results * + * @param opContext the operation's context * @param entityName type of entity to query * @param path the path to be browsed * @param filters the request map with fields and values 
as filters @@ -96,19 +98,20 @@ public AutoCompleteResult autoComplete( * @return a {@link SearchResult} containing the requested batch of search results */ public BrowseResult browse( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nonnull String path, @Nullable Filter filters, int from, - int size, - @Nullable SearchFlags flags) { - return getCachedBrowseResults(entityName, path, filters, from, size, flags); + int size) { + return getCachedBrowseResults(opContext, entityName, path, filters, from, size); } /** * Retrieves cached scroll results. If the query has been cached, this will return quickly. If * not, a full scroll request will be made. * + * @param opContext the operation's context * @param entities the names of the entities to search * @param query the search query * @param filters the filters to include @@ -116,20 +119,19 @@ public BrowseResult browse( * @param scrollId opaque scroll identifier for a scroll request * @param keepAlive the string representation of how long to keep point in time alive * @param size the count - * @param flags additional search flags * @return a {@link ScrollResult} containing the requested batch of scroll results */ public ScrollResult scroll( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String query, @Nullable Filter filters, @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, - int size, - @Nullable SearchFlags flags) { + int size) { return getCachedScrollResults( - entities, query, filters, sortCriterion, scrollId, keepAlive, size, flags); + opContext, entities, query, filters, sortCriterion, scrollId, keepAlive, size); } /** @@ -139,65 +141,64 @@ public ScrollResult scroll( * corresponds to) */ public SearchResult getCachedSearchResults( + @Nonnull OperationContext opContext, @Nonnull List entityNames, @Nonnull String query, @Nullable Filter filters, @Nullable SortCriterion sortCriterion, int from, int size, - @Nullable SearchFlags 
flags, @Nullable List facets) { return new CacheableSearcher<>( cacheManager.getCache(ENTITY_SEARCH_SERVICE_SEARCH_CACHE_NAME), batchSize, querySize -> getRawSearchResults( + opContext, entityNames, query, filters, sortCriterion, querySize.getFrom(), querySize.getSize(), - flags, facets), querySize -> Septet.with( + opContext.getSearchContextId(), entityNames, query, filters != null ? toJsonString(filters) : null, sortCriterion != null ? toJsonString(sortCriterion) : null, - flags != null ? toJsonString(flags) : null, facets, querySize), - flags, enableCache) - .getSearchResults(from, size); + .getSearchResults(opContext, from, size); } /** Returns cached auto-complete results. */ public AutoCompleteResult getCachedAutoCompleteResults( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nonnull String input, @Nullable String field, @Nullable Filter filters, - int limit, - @Nullable SearchFlags flags) { + int limit) { try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getCachedAutoCompleteResults").time()) { Cache cache = cacheManager.getCache(ENTITY_SEARCH_SERVICE_AUTOCOMPLETE_CACHE_NAME); AutoCompleteResult result; - if (enableCache(flags)) { + if (enableCache(opContext.getSearchContext().getSearchFlags())) { try (Timer.Context ignored2 = MetricUtils.timer(this.getClass(), "getCachedAutoCompleteResults_cache").time()) { Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "autocomplete_cache_access").time(); Object cacheKey = Sextet.with( + opContext.getSearchContextId(), entityName, input, field, filters != null ? toJsonString(filters) : null, - flags != null ? toJsonString(flags) : null, limit); String json = cache.get(cacheKey, String.class); result = json != null ? 
toRecordTemplate(AutoCompleteResult.class, json) : null; @@ -205,14 +206,14 @@ public AutoCompleteResult getCachedAutoCompleteResults( if (result == null) { Timer.Context cacheMiss = MetricUtils.timer(this.getClass(), "autocomplete_cache_miss").time(); - result = getRawAutoCompleteResults(entityName, input, field, filters, limit); + result = getRawAutoCompleteResults(opContext, entityName, input, field, filters, limit); cache.put(cacheKey, toJsonString(result)); cacheMiss.stop(); MetricUtils.counter(this.getClass(), "autocomplete_cache_miss_count").inc(); } } } else { - result = getRawAutoCompleteResults(entityName, input, field, filters, limit); + result = getRawAutoCompleteResults(opContext, entityName, input, field, filters, limit); } return result; } @@ -220,27 +221,27 @@ public AutoCompleteResult getCachedAutoCompleteResults( /** Returns cached browse results. */ public BrowseResult getCachedBrowseResults( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nonnull String path, @Nullable Filter filters, int from, - int size, - @Nullable SearchFlags flags) { + int size) { try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getCachedBrowseResults").time()) { Cache cache = cacheManager.getCache(ENTITY_SEARCH_SERVICE_BROWSE_CACHE_NAME); BrowseResult result; - if (enableCache(flags)) { + if (enableCache(opContext.getSearchContext().getSearchFlags())) { try (Timer.Context ignored2 = MetricUtils.timer(this.getClass(), "getCachedBrowseResults_cache").time()) { Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "browse_cache_access").time(); Object cacheKey = Sextet.with( + opContext.getSearchContextId(), entityName, path, filters != null ? toJsonString(filters) : null, - flags != null ? 
toJsonString(flags) : null, from, size); String json = cache.get(cacheKey, String.class); @@ -249,14 +250,14 @@ public BrowseResult getCachedBrowseResults( if (result == null) { Timer.Context cacheMiss = MetricUtils.timer(this.getClass(), "browse_cache_miss").time(); - result = getRawBrowseResults(entityName, path, filters, from, size); + result = getRawBrowseResults(opContext, entityName, path, filters, from, size); cache.put(cacheKey, toJsonString(result)); cacheMiss.stop(); MetricUtils.counter(this.getClass(), "browse_cache_miss_count").inc(); } } } else { - result = getRawBrowseResults(entityName, path, filters, from, size); + result = getRawBrowseResults(opContext, entityName, path, filters, from, size); } return result; } @@ -264,30 +265,33 @@ public BrowseResult getCachedBrowseResults( /** Returns cached scroll results. */ public ScrollResult getCachedScrollResults( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String query, @Nullable Filter filters, @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, - int size, - @Nullable SearchFlags flags) { + int size) { try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getCachedScrollResults").time()) { boolean isFullText = - Boolean.TRUE.equals(Optional.ofNullable(flags).orElse(new SearchFlags()).isFulltext()); + Boolean.TRUE.equals( + Optional.ofNullable(opContext.getSearchContext().getSearchFlags()) + .orElse(new SearchFlags()) + .isFulltext()); Cache cache = cacheManager.getCache(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME); ScrollResult result; - if (enableCache(flags)) { + if (enableCache(opContext.getSearchContext().getSearchFlags())) { Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "scroll_cache_access").time(); Object cacheKey = Septet.with( + opContext.getSearchContextId(), entities, query, filters != null ? toJsonString(filters) : null, sortCriterion != null ? toJsonString(sortCriterion) : null, - flags != null ? 
toJsonString(flags) : null, scrollId, size); String json = cache.get(cacheKey, String.class); @@ -297,6 +301,7 @@ public ScrollResult getCachedScrollResults( Timer.Context cacheMiss = MetricUtils.timer(this.getClass(), "scroll_cache_miss").time(); result = getRawScrollResults( + opContext, entities, query, filters, @@ -304,8 +309,7 @@ public ScrollResult getCachedScrollResults( scrollId, keepAlive, size, - isFullText, - flags); + isFullText); cache.put(cacheKey, toJsonString(result)); cacheMiss.stop(); MetricUtils.counter(this.getClass(), "scroll_cache_miss_count").inc(); @@ -313,6 +317,7 @@ public ScrollResult getCachedScrollResults( } else { result = getRawScrollResults( + opContext, entities, query, filters, @@ -320,8 +325,7 @@ public ScrollResult getCachedScrollResults( scrollId, keepAlive, size, - isFullText, - flags); + isFullText); } return result; } @@ -329,40 +333,43 @@ public ScrollResult getCachedScrollResults( /** Executes the expensive search query using the {@link EntitySearchService} */ private SearchResult getRawSearchResults( + @Nonnull OperationContext opContext, final List entityNames, final String input, final Filter filters, final SortCriterion sortCriterion, final int start, final int count, - @Nullable final SearchFlags searchFlags, @Nullable final List facets) { return entitySearchService.search( - entityNames, input, filters, sortCriterion, start, count, searchFlags, facets); + opContext, entityNames, input, filters, sortCriterion, start, count, facets); } /** Executes the expensive autocomplete query using the {@link EntitySearchService} */ private AutoCompleteResult getRawAutoCompleteResults( + @Nonnull OperationContext opContext, final String entityName, final String input, final String field, final Filter filters, final int limit) { - return entitySearchService.autoComplete(entityName, input, field, filters, limit); + return entitySearchService.autoComplete(opContext, entityName, input, field, filters, limit); } /** Executes the 
expensive autocomplete query using the {@link EntitySearchService} */ private BrowseResult getRawBrowseResults( + @Nonnull OperationContext opContext, final String entityName, final String input, final Filter filters, final int start, final int count) { - return entitySearchService.browse(entityName, input, filters, start, count); + return entitySearchService.browse(opContext, entityName, input, filters, start, count); } /** Executes the expensive search query using the {@link EntitySearchService} */ private ScrollResult getRawScrollResults( + @Nonnull OperationContext opContext, final List entities, final String input, final Filter filters, @@ -370,19 +377,18 @@ private ScrollResult getRawScrollResults( @Nullable final String scrollId, @Nullable final String keepAlive, final int count, - final boolean fulltext, - @Nullable final SearchFlags searchFlags) { + final boolean fulltext) { if (fulltext) { return entitySearchService.fullTextScroll( - entities, input, filters, sortCriterion, scrollId, keepAlive, count, searchFlags); + opContext, entities, input, filters, sortCriterion, scrollId, keepAlive, count); } else { return entitySearchService.structuredScroll( - entities, input, filters, sortCriterion, scrollId, keepAlive, count, searchFlags); + opContext, entities, input, filters, sortCriterion, scrollId, keepAlive, count); } } /** Returns true if the cache should be used or skipped when fetching search results */ - private boolean enableCache(final SearchFlags searchFlags) { + private boolean enableCache(@Nullable final SearchFlags searchFlags) { return enableCache && (searchFlags == null || !searchFlags.isSkipCache()); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java index 936ecb6a8ead1..0effed1d9a578 100644 --- 
a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch; +import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; + import com.linkedin.common.urn.Urn; import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.browse.BrowseResult; @@ -20,6 +22,7 @@ import com.linkedin.metadata.search.utils.SearchUtils; import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.structured.StructuredPropertyDefinition; +import io.datahubproject.metadata.context.OperationContext; import java.io.IOException; import java.util.Collection; import java.util.List; @@ -36,6 +39,16 @@ @RequiredArgsConstructor public class ElasticSearchService implements EntitySearchService, ElasticSearchIndexed { + public static final SearchFlags DEFAULT_SERVICE_SEARCH_FLAGS = + new SearchFlags() + .setFulltext(false) + .setMaxAggValues(20) + .setSkipCache(false) + .setSkipAggregates(false) + .setSkipHighlighting(false) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false); + private static final int MAX_RUN_IDS_INDEXED = 25; // Save the previous 25 run ids in the index. 
private final EntityIndexBuilders indexBuilders; private final ESSearchDAO esSearchDAO; @@ -76,8 +89,11 @@ public void clear() { } @Override - public long docCount(@Nonnull String entityName) { - return esSearchDAO.docCount(entityName); + public long docCount(@Nonnull OperationContext opContext, @Nonnull String entityName) { + return esSearchDAO.docCount( + opContext.withSearchFlags( + flags -> applyDefaultSearchFlags(flags, null, DEFAULT_SERVICE_SEARCH_FLAGS)), + entityName); } @Override @@ -127,37 +143,47 @@ public void appendRunId(@Nonnull String entityName, @Nonnull Urn urn, @Nullable @Nonnull @Override public SearchResult search( + @Nonnull OperationContext opContext, @Nonnull List entityNames, @Nonnull String input, @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, int from, - int size, - @Nullable SearchFlags searchFlags) { - return search(entityNames, input, postFilters, sortCriterion, from, size, searchFlags, null); + int size) { + return search(opContext, entityNames, input, postFilters, sortCriterion, from, size, null); } @Nonnull public SearchResult search( + @Nonnull OperationContext opContext, @Nonnull List entityNames, @Nonnull String input, @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, int from, int size, - @Nullable SearchFlags searchFlags, @Nullable List facets) { log.debug( String.format( "Searching FullText Search documents entityName: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", entityNames, input, postFilters, sortCriterion, from, size)); + return esSearchDAO.search( - entityNames, input, postFilters, sortCriterion, from, size, searchFlags, facets); + opContext.withSearchFlags( + flags -> applyDefaultSearchFlags(flags, input, DEFAULT_SERVICE_SEARCH_FLAGS)), + entityNames, + input, + postFilters, + sortCriterion, + from, + size, + facets); } @Nonnull @Override public SearchResult filter( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nullable Filter 
filters, @Nullable SortCriterion sortCriterion, @@ -167,12 +193,21 @@ public SearchResult filter( String.format( "Filtering Search documents entityName: %s, filters: %s, sortCriterion: %s, from: %s, size: %s", entityName, filters, sortCriterion, from, size)); - return esSearchDAO.filter(entityName, filters, sortCriterion, from, size); + + return esSearchDAO.filter( + opContext.withSearchFlags( + flags -> applyDefaultSearchFlags(flags, null, DEFAULT_SERVICE_SEARCH_FLAGS)), + entityName, + filters, + sortCriterion, + from, + size); } @Nonnull @Override public AutoCompleteResult autoComplete( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nonnull String query, @Nullable String field, @@ -182,12 +217,21 @@ public AutoCompleteResult autoComplete( String.format( "Autocompleting query entityName: %s, query: %s, field: %s, requestParams: %s, limit: %s", entityName, query, field, requestParams, limit)); - return esSearchDAO.autoComplete(entityName, query, field, requestParams, limit); + + return esSearchDAO.autoComplete( + opContext.withSearchFlags( + flags -> applyDefaultSearchFlags(flags, query, DEFAULT_SERVICE_SEARCH_FLAGS)), + entityName, + query, + field, + requestParams, + limit); } @Nonnull @Override public Map aggregateByValue( + @Nonnull OperationContext opContext, @Nullable List entityNames, @Nonnull String field, @Nullable Filter requestParams, @@ -198,12 +242,20 @@ public Map aggregateByValue( field, requestParams, limit); - return esSearchDAO.aggregateByValue(entityNames, field, requestParams, limit); + + return esSearchDAO.aggregateByValue( + opContext.withSearchFlags( + flags -> applyDefaultSearchFlags(flags, null, DEFAULT_SERVICE_SEARCH_FLAGS)), + entityNames, + field, + requestParams, + limit); } @Nonnull @Override public BrowseResult browse( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nonnull String path, @Nullable Filter filters, @@ -213,33 +265,58 @@ public BrowseResult browse( String.format( "Browsing 
entities entityName: %s, path: %s, filters: %s, from: %s, size: %s", entityName, path, filters, from, size)); - return esBrowseDAO.browse(entityName, path, filters, from, size); + return esBrowseDAO.browse( + opContext.withSearchFlags( + flags -> applyDefaultSearchFlags(flags, null, DEFAULT_SERVICE_SEARCH_FLAGS)), + entityName, + path, + filters, + from, + size); } @Nonnull @Override public BrowseResultV2 browseV2( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, - int count, - @Nullable SearchFlags searchFlags) { - return esBrowseDAO.browseV2(entityName, path, filter, input, start, count, searchFlags); + int count) { + + return esBrowseDAO.browseV2( + opContext.withSearchFlags( + flags -> applyDefaultSearchFlags(flags, null, DEFAULT_SERVICE_SEARCH_FLAGS)), + entityName, + path, + filter, + input, + start, + count); } @Nonnull @Override public BrowseResultV2 browseV2( + @Nonnull OperationContext opContext, @Nonnull List entityNames, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, - int count, - @Nullable SearchFlags searchFlags) { - return esBrowseDAO.browseV2(entityNames, path, filter, input, start, count, searchFlags); + int count) { + + return esBrowseDAO.browseV2( + opContext.withSearchFlags( + flags -> applyDefaultSearchFlags(flags, input, DEFAULT_SERVICE_SEARCH_FLAGS)), + entityNames, + path, + filter, + input, + start, + count); } @Nonnull @@ -253,43 +330,61 @@ public List getBrowsePaths(@Nonnull String entityName, @Nonnull Urn urn) @Nonnull @Override public ScrollResult fullTextScroll( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, - int size, - @Nullable SearchFlags searchFlags) { + int size) { log.debug( String.format( "Scrolling Structured Search documents 
entities: %s, input: %s, postFilters: %s, sortCriterion: %s, scrollId: %s, size: %s", entities, input, postFilters, sortCriterion, scrollId, size)); - SearchFlags flags = Optional.ofNullable(searchFlags).orElse(new SearchFlags()); - flags.setFulltext(true); + return esSearchDAO.scroll( - entities, input, postFilters, sortCriterion, scrollId, keepAlive, size, flags); + opContext.withSearchFlags( + flags -> + applyDefaultSearchFlags(flags, input, DEFAULT_SERVICE_SEARCH_FLAGS) + .setFulltext(true)), + entities, + input, + postFilters, + sortCriterion, + scrollId, + keepAlive, + size); } @Nonnull @Override public ScrollResult structuredScroll( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, - int size, - @Nullable SearchFlags searchFlags) { + int size) { log.debug( String.format( "Scrolling FullText Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, scrollId: %s, size: %s", entities, input, postFilters, sortCriterion, scrollId, size)); - SearchFlags flags = Optional.ofNullable(searchFlags).orElse(new SearchFlags()); - flags.setFulltext(false); + return esSearchDAO.scroll( - entities, input, postFilters, sortCriterion, scrollId, keepAlive, size, flags); + opContext.withSearchFlags( + flags -> + applyDefaultSearchFlags(flags, null, DEFAULT_SERVICE_SEARCH_FLAGS) + .setFulltext(false)), + entities, + input, + postFilters, + sortCriterion, + scrollId, + keepAlive, + size); } public Optional raw(@Nonnull String indexName, @Nullable String jsonQuery) { @@ -303,23 +398,25 @@ public int maxResultSize() { @Override public ExplainResponse explain( + @Nonnull OperationContext opContext, @Nonnull String query, @Nonnull String documentId, @Nonnull String entityName, @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, - @Nullable SearchFlags searchFlags, @Nullable String scrollId, 
@Nullable String keepAlive, int size, @Nullable List facets) { + return esSearchDAO.explain( + opContext.withSearchFlags( + flags -> applyDefaultSearchFlags(flags, null, DEFAULT_SERVICE_SEARCH_FLAGS)), query, documentId, entityName, postFilters, sortCriterion, - searchFlags, scrollId, keepAlive, size, diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java index 79f530f18a345..1958bed33c92b 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java @@ -3,6 +3,7 @@ import static com.linkedin.metadata.Constants.ENTITY_TYPE_URN_PREFIX; import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTY_MAPPING_FIELD; import static com.linkedin.metadata.models.StructuredPropertyUtils.sanitizeStructuredPropertyFQN; +import static com.linkedin.metadata.models.annotation.SearchableAnnotation.OBJECT_FIELD_TYPES; import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; import com.google.common.collect.ImmutableMap; @@ -53,6 +54,7 @@ public static Map getPartialNgramConfigWithOverrides( public static final String PATH = "path"; public static final String PROPERTIES = "properties"; + public static final String DYNAMIC_TEMPLATES = "dynamic_templates"; private MappingsBuilder() {} @@ -100,6 +102,7 @@ public static Map getMappings( return merged.isEmpty() ? 
null : merged; }); } + return mappings; } @@ -221,7 +224,7 @@ private static Map getMappingsForField( mappingForField.put(TYPE, ESUtils.LONG_FIELD_TYPE); } else if (fieldType == FieldType.DATETIME) { mappingForField.put(TYPE, ESUtils.DATE_FIELD_TYPE); - } else if (fieldType == FieldType.OBJECT) { + } else if (OBJECT_FIELD_TYPES.contains(fieldType)) { mappingForField.put(TYPE, ESUtils.OBJECT_FIELD_TYPE); } else if (fieldType == FieldType.DOUBLE) { mappingForField.put(TYPE, ESUtils.DOUBLE_FIELD_TYPE); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java index bb6905139f49d..fbb7fcadba8bc 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java @@ -1,6 +1,8 @@ package com.linkedin.metadata.search.elasticsearch.indexbuilder; import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.search.elasticsearch.indexbuilder.MappingsBuilder.PROPERTIES; +import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.TYPE; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; @@ -8,6 +10,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.MapDifference; import com.google.common.collect.Maps; +import com.linkedin.metadata.search.utils.ESUtils; import java.util.List; import java.util.Map; import java.util.Objects; @@ -146,9 +149,10 @@ public ReindexConfig build() { if (super.exists) { /* Consider mapping changes */ MapDifference mappingsDiff = - Maps.difference( - getOrDefault(super.currentMappings, List.of("properties")), - getOrDefault(super.targetMappings, List.of("properties"))); + calculateMapDifference( + 
getOrDefault(super.currentMappings, List.of(PROPERTIES)), + getOrDefault(super.targetMappings, List.of(PROPERTIES))); + super.requiresApplyMappings = !mappingsDiff.entriesDiffering().isEmpty() || !mappingsDiff.entriesOnlyOnRight().isEmpty(); @@ -298,6 +302,47 @@ private boolean isSettingsReindexRequired() { (Map) indexSettings.get("analysis"), super.currentSettings.getByPrefix("index.analysis.")); } + + /** + * Dynamic fields should not be considered as part of the difference. This might need to be + * improved in the future for nested object fields. + * + * @param currentMappings current mappings + * @param targetMappings target mappings + * @return difference map + */ + private static MapDifference calculateMapDifference( + Map currentMappings, Map targetMappings) { + + // Identify dynamic object fields in target + Set targetObjectFields = + targetMappings.entrySet().stream() + .filter( + entry -> + ((Map) entry.getValue()).containsKey(TYPE) + && ((Map) entry.getValue()) + .get(TYPE) + .equals(ESUtils.OBJECT_FIELD_TYPE)) + .map(Map.Entry::getKey) + .collect(Collectors.toSet()); + + if (!targetObjectFields.isEmpty()) { + log.info("Object fields filtered from comparison: {}", targetObjectFields); + Map filteredCurrentMappings = + removeKeys(currentMappings, targetObjectFields); + Map filteredTargetMappings = removeKeys(targetMappings, targetObjectFields); + return Maps.difference(filteredCurrentMappings, filteredTargetMappings); + } + + return Maps.difference(currentMappings, targetMappings); + } + } + + private static Map removeKeys( + Map mapObject, Set keysToRemove) { + return mapObject.entrySet().stream() + .filter(entry -> !keysToRemove.contains(entry.getKey())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } private static boolean equalsGroup(Map newSettings, Settings oldSettings) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java 
b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java index dd1c09853114d..7209c1ce147be 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java @@ -1,6 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.query; -import static com.linkedin.metadata.utils.SearchUtil.filterSoftDeletedByDefault; +import static com.linkedin.metadata.search.utils.ESUtils.applyDefaultSearchFilters; +import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; import com.codahale.metrics.Timer; import com.datahub.util.exception.ESQueryException; @@ -29,6 +30,7 @@ import com.linkedin.metadata.utils.SearchUtil; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.metrics.MetricUtils; +import io.datahubproject.metadata.context.OperationContext; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; @@ -86,6 +88,14 @@ public class ESBrowseDAO { // Set explicit max size for grouping private static final int AGGREGATION_MAX_SIZE = 2000; + private static final SearchFlags DEFAULT_BROWSE_SEARCH_FLAGS = + new SearchFlags() + .setFulltext(false) + .setSkipHighlighting(true) + .setGetSuggestions(false) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false); + @Value private class BrowseGroupsResult { List groups; @@ -112,6 +122,7 @@ private class BrowseGroupsResultV2 { */ @Nonnull public BrowseResult browse( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nonnull String path, @Nullable Filter filters, @@ -119,6 +130,10 @@ public BrowseResult browse( int size) { final Map requestMap = SearchUtils.getRequestMap(filters); + final OperationContext finalOpContext = + opContext.withSearchFlags( + flags -> applyDefaultSearchFlags(flags, path, DEFAULT_BROWSE_SEARCH_FLAGS)); + try { 
final String indexName = indexConvention.getIndexName( @@ -128,7 +143,8 @@ public BrowseResult browse( try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esGroupSearch").time()) { groupsResponse = client.search( - constructGroupsSearchRequest(indexName, path, requestMap), RequestOptions.DEFAULT); + constructGroupsSearchRequest(finalOpContext, indexName, path, requestMap), + RequestOptions.DEFAULT); } final BrowseGroupsResult browseGroupsResult = extractGroupsResponse(groupsResponse, path, from, size); @@ -144,7 +160,8 @@ public BrowseResult browse( try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esEntitiesSearch").time()) { entitiesResponse = client.search( - constructEntitiesSearchRequest(indexName, path, requestMap, entityFrom, entitySize), + constructEntitiesSearchRequest( + finalOpContext, indexName, path, requestMap, entityFrom, entitySize), RequestOptions.DEFAULT); } final int numEntities = (int) entitiesResponse.getHits().getTotalHits().value; @@ -194,11 +211,14 @@ private AggregationBuilder buildAggregations(@Nonnull String path) { */ @Nonnull protected SearchRequest constructGroupsSearchRequest( - @Nonnull String indexName, @Nonnull String path, @Nonnull Map requestMap) { + @Nonnull OperationContext opContext, + @Nonnull String indexName, + @Nonnull String path, + @Nonnull Map requestMap) { final SearchRequest searchRequest = new SearchRequest(indexName); final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.size(0); - searchSourceBuilder.query(buildQueryString(path, requestMap, true)); + searchSourceBuilder.query(buildQueryString(opContext, path, requestMap, true)); searchSourceBuilder.aggregation(buildAggregations(path)); searchRequest.source(searchSourceBuilder); return searchRequest; @@ -214,12 +234,15 @@ protected SearchRequest constructGroupsSearchRequest( */ @Nonnull private QueryBuilder buildQueryString( - @Nonnull String path, @Nonnull Map requestMap, boolean isGroupQuery) { + 
@Nonnull OperationContext opContext, + @Nonnull String path, + @Nonnull Map requestMap, + boolean isGroupQuery) { final int browseDepthVal = getPathDepth(path); final BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery(); - queryBuilder.mustNot(QueryBuilders.termQuery(REMOVED, "true")); + applyDefaultSearchFilters(opContext, null, queryBuilder); if (!path.isEmpty()) { queryBuilder.filter(QueryBuilders.termQuery(BROWSE_PATH, path)); @@ -247,6 +270,7 @@ private QueryBuilder buildQueryString( @VisibleForTesting @Nonnull SearchRequest constructEntitiesSearchRequest( + @Nonnull OperationContext opContext, @Nonnull String indexName, @Nonnull String path, @Nonnull Map requestMap, @@ -258,7 +282,7 @@ SearchRequest constructEntitiesSearchRequest( searchSourceBuilder.size(size); searchSourceBuilder.fetchSource(new String[] {BROWSE_PATH, URN}, null); searchSourceBuilder.sort(URN, SortOrder.ASC); - searchSourceBuilder.query(buildQueryString(path, requestMap, false)); + searchSourceBuilder.query(buildQueryString(opContext, path, requestMap, false)); searchRequest.source(searchSourceBuilder); return searchRequest; } @@ -276,6 +300,7 @@ SearchRequest constructEntitiesSearchRequest( @VisibleForTesting @Nonnull SearchRequest constructEntitiesSearchRequest( + @Nonnull OperationContext opContext, @Nonnull String indexName, @Nonnull String path, @Nonnull Map requestMap, @@ -291,7 +316,7 @@ SearchRequest constructEntitiesSearchRequest( searchSourceBuilder.size(size); searchSourceBuilder.fetchSource(new String[] {BROWSE_PATH, URN}, null); searchSourceBuilder.sort(URN, SortOrder.ASC); - searchSourceBuilder.query(buildQueryString(path, requestMap, false)); + searchSourceBuilder.query(buildQueryString(opContext, path, requestMap, false)); searchRequest.source(searchSourceBuilder); return searchRequest; } @@ -401,20 +426,25 @@ public List getBrowsePaths(@Nonnull String entityName, @Nonnull Urn urn) } public BrowseResultV2 browseV2( + @Nonnull OperationContext opContext, @Nonnull String 
entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, - int count, - @Nullable SearchFlags searchFlags) { + int count) { try { final SearchResponse groupsResponse; + final OperationContext finalOpContext = + opContext.withSearchFlags( + flags -> applyDefaultSearchFlags(flags, path, DEFAULT_BROWSE_SEARCH_FLAGS)); + try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esGroupSearch").time()) { final String finalInput = input.isEmpty() ? "*" : input; groupsResponse = client.search( - constructGroupsSearchRequestV2(entityName, path, filter, finalInput, searchFlags), + constructGroupsSearchRequestV2( + finalOpContext, entityName, path, filter, finalInput), RequestOptions.DEFAULT); } @@ -438,22 +468,25 @@ public BrowseResultV2 browseV2( } public BrowseResultV2 browseV2( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, - int count, - @Nullable SearchFlags searchFlags) { + int count) { try { final SearchResponse groupsResponse; + final OperationContext finalOpContext = + opContext.withSearchFlags( + flags -> applyDefaultSearchFlags(flags, path, DEFAULT_BROWSE_SEARCH_FLAGS)); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esGroupSearch").time()) { final String finalInput = input.isEmpty() ? 
"*" : input; groupsResponse = client.search( constructGroupsSearchRequestBrowseAcrossEntities( - entities, path, filter, finalInput, searchFlags), + finalOpContext, entities, path, filter, finalInput), RequestOptions.DEFAULT); } @@ -478,11 +511,11 @@ public BrowseResultV2 browseV2( @Nonnull private SearchRequest constructGroupsSearchRequestV2( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, - @Nonnull String input, - @Nullable SearchFlags searchFlags) { + @Nonnull String input) { final String indexName = indexConvention.getIndexName(aspectRetriever.getEntityRegistry().getEntitySpec(entityName)); final SearchRequest searchRequest = new SearchRequest(indexName); @@ -490,11 +523,11 @@ private SearchRequest constructGroupsSearchRequestV2( searchSourceBuilder.size(0); searchSourceBuilder.query( buildQueryStringV2( + opContext, entityName, path, SearchUtil.transformFilterForEntities(filter, indexConvention), - input, - searchFlags)); + input)); searchSourceBuilder.aggregation(buildAggregationsV2(path)); searchRequest.source(searchSourceBuilder); return searchRequest; @@ -502,11 +535,11 @@ private SearchRequest constructGroupsSearchRequestV2( @Nonnull private SearchRequest constructGroupsSearchRequestBrowseAcrossEntities( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String path, @Nullable Filter filter, - @Nonnull String input, - @Nullable SearchFlags searchFlags) { + @Nonnull String input) { List entitySpecs = entities.stream() @@ -521,11 +554,11 @@ private SearchRequest constructGroupsSearchRequestBrowseAcrossEntities( searchSourceBuilder.size(0); searchSourceBuilder.query( buildQueryStringBrowseAcrossEntities( + opContext, entitySpecs, path, SearchUtil.transformFilterForEntities(filter, indexConvention), - input, - searchFlags)); + input)); searchSourceBuilder.aggregation(buildAggregationsV2(path)); searchRequest.source(searchSourceBuilder); return searchRequest; @@ -550,13 
+583,16 @@ private static int getPathDepthV2(@Nonnull String path) { @Nonnull private QueryBuilder buildQueryStringV2( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, - @Nonnull String input, - @Nullable SearchFlags searchFlags) { - SearchFlags finalSearchFlags = - Optional.ofNullable(searchFlags).orElse(new SearchFlags().setFulltext(true)); + @Nonnull String input) { + + final OperationContext finalOpContext = + opContext.withSearchFlags( + flags -> Optional.ofNullable(flags).orElse(new SearchFlags().setFulltext(true))); + final int browseDepthVal = getPathDepthV2(path); final BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery(); @@ -565,11 +601,12 @@ private QueryBuilder buildQueryStringV2( QueryBuilder query = SearchRequestHandler.getBuilder( entitySpec, searchConfiguration, customSearchConfiguration, aspectRetriever) - .getQuery(input, Boolean.TRUE.equals(finalSearchFlags.isFulltext())); + .getQuery( + input, + Boolean.TRUE.equals( + finalOpContext.getSearchContext().getSearchFlags().isFulltext())); queryBuilder.must(query); - filterSoftDeletedByDefault(filter, queryBuilder); - if (!path.isEmpty()) { queryBuilder.filter(QueryBuilders.matchQuery(BROWSE_PATH_V2, path)); } @@ -578,20 +615,21 @@ private QueryBuilder buildQueryStringV2( queryBuilder.filter( SearchRequestHandler.getFilterQuery( - filter, entitySpec.getSearchableFieldTypes(), aspectRetriever)); + finalOpContext, filter, entitySpec.getSearchableFieldTypes(), aspectRetriever)); return queryBuilder; } @Nonnull private QueryBuilder buildQueryStringBrowseAcrossEntities( + @Nonnull OperationContext opContext, @Nonnull List entitySpecs, @Nonnull String path, @Nullable Filter filter, - @Nonnull String input, - @Nullable SearchFlags searchFlags) { - SearchFlags finalSearchFlags = - Optional.ofNullable(searchFlags).orElse(new SearchFlags().setFulltext(true)); + @Nonnull String input) { + final OperationContext finalOpContext = + 
opContext.withSearchFlags( + flags -> Optional.ofNullable(flags).orElse(new SearchFlags().setFulltext(true))); final int browseDepthVal = getPathDepthV2(path); final BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery(); @@ -599,7 +637,10 @@ private QueryBuilder buildQueryStringBrowseAcrossEntities( QueryBuilder query = SearchRequestHandler.getBuilder( entitySpecs, searchConfiguration, customSearchConfiguration, aspectRetriever) - .getQuery(input, Boolean.TRUE.equals(finalSearchFlags.isFulltext())); + .getQuery( + input, + Boolean.TRUE.equals( + finalOpContext.getSearchContext().getSearchFlags().isFulltext())); queryBuilder.must(query); if (!path.isEmpty()) { @@ -620,7 +661,8 @@ private QueryBuilder buildQueryStringBrowseAcrossEntities( return set1; })); queryBuilder.filter( - SearchRequestHandler.getFilterQuery(filter, searchableFields, aspectRetriever)); + SearchRequestHandler.getFilterQuery( + finalOpContext, filter, searchableFields, aspectRetriever)); return queryBuilder; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java index 4cfb54dacb5f0..3fd1062fb25c5 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java @@ -14,7 +14,6 @@ import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.AggregationMetadata; @@ -29,6 +28,7 @@ import com.linkedin.metadata.search.utils.QueryUtils; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import 
com.linkedin.metadata.utils.metrics.MetricUtils; +import io.datahubproject.metadata.context.OperationContext; import io.opentelemetry.extension.annotations.WithSpan; import java.io.IOException; import java.util.ArrayList; @@ -81,13 +81,13 @@ public class ESSearchDAO { @Nonnull private final SearchConfiguration searchConfiguration; @Nullable private final CustomSearchConfiguration customSearchConfiguration; - public long docCount(@Nonnull String entityName) { + public long docCount(@Nonnull OperationContext opContext, @Nonnull String entityName) { EntitySpec entitySpec = aspectRetriever.getEntityRegistry().getEntitySpec(entityName); CountRequest countRequest = new CountRequest(indexConvention.getIndexName(entitySpec)) .query( SearchRequestHandler.getFilterQuery( - null, entitySpec.getSearchableFieldTypes(), aspectRetriever)); + opContext, null, entitySpec.getSearchableFieldTypes(), aspectRetriever)); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "docCount").time()) { return client.count(countRequest, RequestOptions.DEFAULT).getCount(); } catch (IOException e) { @@ -99,6 +99,7 @@ public long docCount(@Nonnull String entityName) { @Nonnull @WithSpan private SearchResult executeAndExtract( + @Nonnull OperationContext opContext, @Nonnull List entitySpec, @Nonnull SearchRequest searchRequest, @Nullable Filter filter, @@ -113,7 +114,7 @@ private SearchResult executeAndExtract( return transformIndexIntoEntityName( SearchRequestHandler.getBuilder( entitySpec, searchConfiguration, customSearchConfiguration, aspectRetriever) - .extractResult(searchResponse, filter, from, size)); + .extractResult(opContext, searchResponse, filter, from, size)); } catch (Exception e) { log.error("Search query failed", e); throw new ESQueryException("Search query failed:", e); @@ -184,6 +185,7 @@ private AggregationMetadataArray transformIndexIntoEntityName(AggregationMetadat @Nonnull @WithSpan private ScrollResult executeAndExtract( + @Nonnull OperationContext opContext, 
@Nonnull List entitySpecs, @Nonnull SearchRequest searchRequest, @Nullable Filter filter, @@ -198,7 +200,13 @@ private ScrollResult executeAndExtract( SearchRequestHandler.getBuilder( entitySpecs, searchConfiguration, customSearchConfiguration, aspectRetriever) .extractScrollResult( - searchResponse, filter, scrollId, keepAlive, size, supportsPointInTime())); + opContext, + searchResponse, + filter, + scrollId, + keepAlive, + size, + supportsPointInTime())); } catch (Exception e) { log.error("Search query failed: {}", searchRequest, e); throw new ESQueryException("Search query failed:", e); @@ -215,20 +223,19 @@ private ScrollResult executeAndExtract( * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return - * @param searchFlags Structured or full text search modes, plus other misc options * @param facets list of facets we want aggregations for * @return a {@link SearchResult} that contains a list of matched documents and related search * result metadata */ @Nonnull public SearchResult search( + @Nonnull OperationContext opContext, @Nonnull List entityNames, @Nonnull String input, @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, int from, int size, - @Nullable SearchFlags searchFlags, @Nullable List facets) { final String finalInput = input.isEmpty() ? 
"*" : input; Timer.Context searchRequestTimer = MetricUtils.timer(this.getClass(), "searchRequest").time(); @@ -242,12 +249,12 @@ public SearchResult search( SearchRequestHandler.getBuilder( entitySpecs, searchConfiguration, customSearchConfiguration, aspectRetriever) .getSearchRequest( - finalInput, transformedFilters, sortCriterion, from, size, searchFlags, facets); + opContext, finalInput, transformedFilters, sortCriterion, from, size, facets); searchRequest.indices( entityNames.stream().map(indexConvention::getEntityIndexName).toArray(String[]::new)); searchRequestTimer.stop(); // Step 2: execute the query and extract results, validated against document model as well - return executeAndExtract(entitySpecs, searchRequest, transformedFilters, from, size); + return executeAndExtract(opContext, entitySpecs, searchRequest, transformedFilters, from, size); } /** @@ -263,6 +270,7 @@ public SearchResult search( */ @Nonnull public SearchResult filter( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nullable Filter filters, @Nullable SortCriterion sortCriterion, @@ -273,10 +281,11 @@ public SearchResult filter( final SearchRequest searchRequest = SearchRequestHandler.getBuilder( entitySpec, searchConfiguration, customSearchConfiguration, aspectRetriever) - .getFilterRequest(transformedFilters, sortCriterion, from, size); + .getFilterRequest(opContext, transformedFilters, sortCriterion, from, size); searchRequest.indices(indexConvention.getIndexName(entitySpec)); - return executeAndExtract(List.of(entitySpec), searchRequest, transformedFilters, from, size); + return executeAndExtract( + opContext, List.of(entitySpec), searchRequest, transformedFilters, from, size); } /** @@ -293,18 +302,24 @@ public SearchResult filter( */ @Nonnull public AutoCompleteResult autoComplete( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nonnull String query, @Nullable String field, @Nullable Filter requestParams, int limit) { try { + EntitySpec 
entitySpec = aspectRetriever.getEntityRegistry().getEntitySpec(entityName); AutocompleteRequestHandler builder = AutocompleteRequestHandler.getBuilder(entitySpec, aspectRetriever); SearchRequest req = builder.getSearchRequest( - query, field, transformFilterForEntities(requestParams, indexConvention), limit); + opContext, + query, + field, + transformFilterForEntities(requestParams, indexConvention), + limit); req.indices(indexConvention.getIndexName(entitySpec)); SearchResponse searchResponse = client.search(req, RequestOptions.DEFAULT); return builder.extractResult(searchResponse, query); @@ -325,10 +340,12 @@ public AutoCompleteResult autoComplete( */ @Nonnull public Map aggregateByValue( + @Nonnull OperationContext opContext, @Nullable List entityNames, @Nonnull String field, @Nullable Filter requestParams, int limit) { + List entitySpecs; if (entityNames == null || entityNames.isEmpty()) { entitySpecs = QueryUtils.getQueryByDefaultEntitySpecs(aspectRetriever.getEntityRegistry()); @@ -342,7 +359,10 @@ public Map aggregateByValue( SearchRequestHandler.getBuilder( entitySpecs, searchConfiguration, customSearchConfiguration, aspectRetriever) .getAggregationRequest( - field, transformFilterForEntities(requestParams, indexConvention), limit); + opContext, + field, + transformFilterForEntities(requestParams, indexConvention), + limit); if (entityNames == null) { String indexName = indexConvention.getAllEntityIndicesPattern(); searchRequest.indices(indexName); @@ -381,14 +401,14 @@ public Map aggregateByValue( */ @Nonnull public ScrollResult scroll( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, - int size, - SearchFlags searchFlags) { + int size) { final String finalInput = input.isEmpty() ? 
"*" : input; String[] indexArray = entities.stream().map(indexConvention::getEntityIndexName).toArray(String[]::new); @@ -401,6 +421,7 @@ public ScrollResult scroll( // TODO: Align scroll and search using facets final SearchRequest searchRequest = getScrollRequest( + opContext, scrollId, keepAlive, indexArray, @@ -409,7 +430,6 @@ public ScrollResult scroll( entitySpecs, finalInput, sortCriterion, - searchFlags, null); // PIT specifies indices in creation so it doesn't support specifying indices on the request, so @@ -420,10 +440,11 @@ public ScrollResult scroll( scrollRequestTimer.stop(); return executeAndExtract( - entitySpecs, searchRequest, transformedFilters, scrollId, keepAlive, size); + opContext, entitySpecs, searchRequest, transformedFilters, scrollId, keepAlive, size); } private SearchRequest getScrollRequest( + @Nonnull OperationContext opContext, @Nullable String scrollId, @Nullable String keepAlive, String[] indexArray, @@ -432,7 +453,6 @@ private SearchRequest getScrollRequest( List entitySpecs, String finalInput, @Nullable SortCriterion sortCriterion, - @Nullable SearchFlags searchFlags, @Nullable List facets) { String pitId = null; Object[] sort = null; @@ -453,6 +473,7 @@ private SearchRequest getScrollRequest( return SearchRequestHandler.getBuilder( entitySpecs, searchConfiguration, customSearchConfiguration, aspectRetriever) .getSearchRequest( + opContext, finalInput, postFilters, sortCriterion, @@ -460,7 +481,6 @@ private SearchRequest getScrollRequest( pitId, keepAlive, size, - searchFlags, facets); } @@ -507,12 +527,12 @@ private String createPointInTime(String[] indexArray, String keepAlive) { } public ExplainResponse explain( + @Nonnull OperationContext opContext, @Nonnull String query, @Nonnull String documentId, @Nonnull String entityName, @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, - @Nullable SearchFlags searchFlags, @Nullable String scrollId, @Nullable String keepAlive, int size, @@ -523,6 +543,7 @@ public 
ExplainResponse explain( final String finalQuery = query.isEmpty() ? "*" : query; final SearchRequest searchRequest = getScrollRequest( + opContext, scrollId, keepAlive, indexArray, @@ -531,7 +552,6 @@ public ExplainResponse explain( Collections.singletonList(entitySpec), finalQuery, sortCriterion, - searchFlags, facets); ; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java index fb3b51930370c..a32a8d1a54234 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java @@ -149,7 +149,8 @@ private AggregationBuilder facetToAggregationBuilder(final String inputFacet) { case MISSING_SPECIAL_TYPE: aggBuilder = INDEX_VIRTUAL_FIELD.equalsIgnoreCase(specialTypeFields.get(1)) - ? AggregationBuilders.missing(inputFacet).field(getAggregationField("_index")) + ? AggregationBuilders.missing(inputFacet) + .field(getAggregationField(ES_INDEX_FIELD)) : AggregationBuilders.missing(inputFacet) .field(getAggregationField(specialTypeFields.get(1))); break; @@ -161,7 +162,7 @@ private AggregationBuilder facetToAggregationBuilder(final String inputFacet) { aggBuilder = facet.equalsIgnoreCase(INDEX_VIRTUAL_FIELD) ? 
AggregationBuilders.terms(inputFacet) - .field(getAggregationField("_index")) + .field(getAggregationField(ES_INDEX_FIELD)) .size(configs.getMaxTermBucketSize()) .minDocCount(0) : AggregationBuilders.terms(inputFacet) diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java index de35d53bcde49..3e6ce53b7af5c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java @@ -1,6 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.query.request; import static com.linkedin.metadata.models.SearchableFieldSpecExtractor.PRIMARY_URN_SEARCH_PROPERTIES; +import static com.linkedin.metadata.search.utils.ESUtils.applyDefaultSearchFilters; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; @@ -14,6 +15,7 @@ import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.utils.ESUtils; +import io.datahubproject.metadata.context.OperationContext; import java.net.URISyntaxException; import java.util.Collections; import java.util.HashSet; @@ -32,7 +34,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.MultiMatchQueryBuilder; -import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.SearchHit; import org.opensearch.search.builder.SearchSourceBuilder; @@ -84,11 +85,19 @@ public static AutocompleteRequestHandler getBuilder( } public SearchRequest getSearchRequest( - @Nonnull String input, @Nullable String field, @Nullable Filter filter, int 
limit) { + @Nonnull OperationContext opContext, + @Nonnull String input, + @Nullable String field, + @Nullable Filter filter, + int limit) { SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.size(limit); - searchSourceBuilder.query(getQuery(input, field)); + // apply default filters + BoolQueryBuilder boolQueryBuilder = + applyDefaultSearchFilters(opContext, filter, getQuery(input, field)); + + searchSourceBuilder.query(boolQueryBuilder); searchSourceBuilder.postFilter( ESUtils.buildFilterQuery(filter, false, searchableFieldTypes, aspectRetriever)); searchSourceBuilder.highlighter(getHighlights(field)); @@ -96,11 +105,11 @@ public SearchRequest getSearchRequest( return searchRequest; } - private QueryBuilder getQuery(@Nonnull String query, @Nullable String field) { + private BoolQueryBuilder getQuery(@Nonnull String query, @Nullable String field) { return getQuery(getAutocompleteFields(field), query); } - public static QueryBuilder getQuery(List autocompleteFields, @Nonnull String query) { + public static BoolQueryBuilder getQuery(List autocompleteFields, @Nonnull String query) { BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); // Search for exact matches with higher boost and ngram matches MultiMatchQueryBuilder autocompleteQueryBuilder = @@ -126,8 +135,6 @@ public static QueryBuilder getQuery(List autocompleteFields, @Nonnull St }); finalQuery.should(autocompleteQueryBuilder); - - finalQuery.mustNot(QueryBuilders.matchQuery("removed", true)); return finalQuery; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchFieldConfig.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchFieldConfig.java index 7709ff16f7940..d681df00546ac 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchFieldConfig.java +++ 
b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchFieldConfig.java @@ -51,7 +51,8 @@ public class SearchFieldConfig { SearchableAnnotation.FieldType.BOOLEAN, SearchableAnnotation.FieldType.COUNT, SearchableAnnotation.FieldType.DATETIME, - SearchableAnnotation.FieldType.OBJECT); + SearchableAnnotation.FieldType.OBJECT, + SearchableAnnotation.FieldType.MAP_ARRAY); // NOT true for `urn` public static final Set TYPES_WITH_URN_TEXT = Set.of(SearchableAnnotation.FieldType.URN, SearchableAnnotation.FieldType.URN_PARTIAL); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java index db09c52d2099c..0ae23445140e0 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java @@ -1,8 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.query.request; import static com.linkedin.metadata.search.utils.ESUtils.NAME_SUGGESTION; -import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; -import static com.linkedin.metadata.utils.SearchUtil.*; +import static com.linkedin.metadata.search.utils.ESUtils.applyDefaultSearchFilters; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; @@ -30,8 +29,10 @@ import com.linkedin.metadata.search.SearchSuggestion; import com.linkedin.metadata.search.SearchSuggestionArray; import com.linkedin.metadata.search.features.Features; +import com.linkedin.metadata.search.utils.ESAccessControlUtil; import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.util.Pair; +import io.datahubproject.metadata.context.OperationContext; import 
io.opentelemetry.extension.annotations.WithSpan; import java.net.URISyntaxException; import java.util.ArrayList; @@ -64,13 +65,7 @@ @Slf4j public class SearchRequestHandler { - private static final SearchFlags DEFAULT_SERVICE_SEARCH_FLAGS = - new SearchFlags() - .setFulltext(false) - .setMaxAggValues(20) - .setSkipCache(false) - .setSkipAggregates(false) - .setSkipHighlighting(false); + private static final Map, SearchRequestHandler> REQUEST_HANDLER_BY_ENTITY_NAME = new ConcurrentHashMap<>(); private final List entitySpecs; @@ -170,18 +165,19 @@ private Set getDefaultQueryFieldNames(List annotat .collect(Collectors.toSet()); } - public BoolQueryBuilder getFilterQuery(@Nullable Filter filter) { - return getFilterQuery(filter, searchableFieldTypes, aspectRetriever); + public BoolQueryBuilder getFilterQuery( + @Nonnull OperationContext opContext, @Nullable Filter filter) { + return getFilterQuery(opContext, filter, searchableFieldTypes, aspectRetriever); } public static BoolQueryBuilder getFilterQuery( + @Nonnull OperationContext opContext, @Nullable Filter filter, Map> searchableFieldTypes, @Nonnull AspectRetriever aspectRetriever) { BoolQueryBuilder filterQuery = ESUtils.buildFilterQuery(filter, false, searchableFieldTypes, aspectRetriever); - - return filterSoftDeletedByDefault(filter, filterQuery); + return applyDefaultSearchFilters(opContext, filter, filterQuery); } /** @@ -194,23 +190,21 @@ public static BoolQueryBuilder getFilterQuery( * @param filter the search filter * @param from index to start the search from * @param size the number of search hits to return - * @param searchFlags Various flags controlling search query options * @param facets list of facets we want aggregations for * @return a valid search request */ @Nonnull @WithSpan public SearchRequest getSearchRequest( + @Nonnull OperationContext opContext, @Nonnull String input, @Nullable Filter filter, @Nullable SortCriterion sortCriterion, int from, int size, - @Nullable SearchFlags searchFlags, 
@Nullable List facets) { - SearchFlags finalSearchFlags = - applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); + SearchFlags searchFlags = opContext.getSearchContext().getSearchFlags(); SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -218,20 +212,20 @@ public SearchRequest getSearchRequest( searchSourceBuilder.size(size); searchSourceBuilder.fetchSource("urn", null); - BoolQueryBuilder filterQuery = getFilterQuery(filter); + BoolQueryBuilder filterQuery = getFilterQuery(opContext, filter); searchSourceBuilder.query( QueryBuilders.boolQuery() - .must(getQuery(input, Boolean.TRUE.equals(finalSearchFlags.isFulltext()))) + .must(getQuery(input, Boolean.TRUE.equals(searchFlags.isFulltext()))) .filter(filterQuery)); - if (Boolean.FALSE.equals(finalSearchFlags.isSkipAggregates())) { + if (Boolean.FALSE.equals(searchFlags.isSkipAggregates())) { aggregationQueryBuilder.getAggregations(facets).forEach(searchSourceBuilder::aggregation); } - if (Boolean.FALSE.equals(finalSearchFlags.isSkipHighlighting())) { + if (Boolean.FALSE.equals(searchFlags.isSkipHighlighting())) { searchSourceBuilder.highlighter(highlights); } ESUtils.buildSortOrder(searchSourceBuilder, sortCriterion, entitySpecs); - if (Boolean.TRUE.equals(finalSearchFlags.isGetSuggestions())) { + if (Boolean.TRUE.equals(searchFlags.isGetSuggestions())) { ESUtils.buildNameSuggestions(searchSourceBuilder, input); } @@ -256,6 +250,7 @@ public SearchRequest getSearchRequest( @Nonnull @WithSpan public SearchRequest getSearchRequest( + @Nonnull OperationContext opContext, @Nonnull String input, @Nullable Filter filter, @Nullable SortCriterion sortCriterion, @@ -263,11 +258,10 @@ public SearchRequest getSearchRequest( @Nullable String pitId, @Nullable String keepAlive, int size, - SearchFlags searchFlags, @Nullable List facets) { + SearchFlags searchFlags = opContext.getSearchContext().getSearchFlags(); SearchRequest 
searchRequest = new PITAwareSearchRequest(); - SearchFlags finalSearchFlags = - applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); ESUtils.setSearchAfter(searchSourceBuilder, sort, pitId, keepAlive); @@ -275,15 +269,15 @@ public SearchRequest getSearchRequest( searchSourceBuilder.size(size); searchSourceBuilder.fetchSource("urn", null); - BoolQueryBuilder filterQuery = getFilterQuery(filter); + BoolQueryBuilder filterQuery = getFilterQuery(opContext, filter); searchSourceBuilder.query( QueryBuilders.boolQuery() - .must(getQuery(input, Boolean.TRUE.equals(finalSearchFlags.isFulltext()))) + .must(getQuery(input, Boolean.TRUE.equals(searchFlags.isFulltext()))) .filter(filterQuery)); - if (Boolean.FALSE.equals(finalSearchFlags.isSkipAggregates())) { + if (Boolean.FALSE.equals(searchFlags.isSkipAggregates())) { aggregationQueryBuilder.getAggregations(facets).forEach(searchSourceBuilder::aggregation); } - if (Boolean.FALSE.equals(finalSearchFlags.isSkipHighlighting())) { + if (Boolean.FALSE.equals(searchFlags.isSkipHighlighting())) { searchSourceBuilder.highlighter(highlights); } ESUtils.buildSortOrder(searchSourceBuilder, sortCriterion, entitySpecs); @@ -306,10 +300,14 @@ public SearchRequest getSearchRequest( */ @Nonnull public SearchRequest getFilterRequest( - @Nullable Filter filters, @Nullable SortCriterion sortCriterion, int from, int size) { + @Nonnull OperationContext opContext, + @Nullable Filter filters, + @Nullable SortCriterion sortCriterion, + int from, + int size) { SearchRequest searchRequest = new SearchRequest(); - BoolQueryBuilder filterQuery = getFilterQuery(filters); + BoolQueryBuilder filterQuery = getFilterQuery(opContext, filters); final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.query(filterQuery); searchSourceBuilder.from(from).size(size); @@ -329,9 +327,13 @@ public SearchRequest getFilterRequest( */ 
@Nonnull public SearchRequest getAggregationRequest( - @Nonnull String field, @Nullable Filter filter, int limit) { + @Nonnull OperationContext opContext, + @Nonnull String field, + @Nullable Filter filter, + int limit) { + SearchRequest searchRequest = new SearchRequest(); - BoolQueryBuilder filterQuery = getFilterQuery(filter); + BoolQueryBuilder filterQuery = getFilterQuery(opContext, filter); final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.query(filterQuery); @@ -366,10 +368,15 @@ public HighlightBuilder getHighlights() { @WithSpan public SearchResult extractResult( - @Nonnull SearchResponse searchResponse, Filter filter, int from, int size) { + @Nonnull OperationContext opContext, + @Nonnull SearchResponse searchResponse, + Filter filter, + int from, + int size) { int totalCount = (int) searchResponse.getHits().getTotalHits().value; - List resultList = getResults(searchResponse); - SearchResultMetadata searchResultMetadata = extractSearchResultMetadata(searchResponse, filter); + List resultList = getResults(opContext, searchResponse); + SearchResultMetadata searchResultMetadata = + extractSearchResultMetadata(opContext, searchResponse, filter); return new SearchResult() .setEntities(new SearchEntityArray(resultList)) @@ -381,6 +388,7 @@ public SearchResult extractResult( @WithSpan public ScrollResult extractScrollResult( + @Nonnull OperationContext opContext, @Nonnull SearchResponse searchResponse, Filter filter, @Nullable String scrollId, @@ -388,8 +396,9 @@ public ScrollResult extractScrollResult( int size, boolean supportsPointInTime) { int totalCount = (int) searchResponse.getHits().getTotalHits().value; - List resultList = getResults(searchResponse); - SearchResultMetadata searchResultMetadata = extractSearchResultMetadata(searchResponse, filter); + List resultList = getResults(opContext, searchResponse); + SearchResultMetadata searchResultMetadata = + extractSearchResultMetadata(opContext, searchResponse, 
filter); SearchHit[] searchHits = searchResponse.getHits().getHits(); // Only return next scroll ID if there are more results, indicated by full size results String nextScrollId = null; @@ -484,10 +493,13 @@ private SearchEntity getResult(@Nonnull SearchHit hit) { * @return List of search entities */ @Nonnull - private List getResults(@Nonnull SearchResponse searchResponse) { - return Arrays.stream(searchResponse.getHits().getHits()) - .map(this::getResult) - .collect(Collectors.toList()); + private List getResults( + @Nonnull OperationContext opContext, @Nonnull SearchResponse searchResponse) { + return ESAccessControlUtil.restrictSearchResult( + opContext, + Arrays.stream(searchResponse.getHits().getHits()) + .map(this::getResult) + .collect(Collectors.toList())); } @Nonnull @@ -509,13 +521,18 @@ private Urn getUrnFromSearchHit(@Nonnull SearchHit hit) { */ @Nonnull private SearchResultMetadata extractSearchResultMetadata( - @Nonnull SearchResponse searchResponse, @Nullable Filter filter) { + @Nonnull OperationContext opContext, + @Nonnull SearchResponse searchResponse, + @Nullable Filter filter) { + final SearchFlags searchFlags = opContext.getSearchContext().getSearchFlags(); final SearchResultMetadata searchResultMetadata = new SearchResultMetadata().setAggregations(new AggregationMetadataArray()); - final List aggregationMetadataList = - aggregationQueryBuilder.extractAggregationMetadata(searchResponse, filter); - searchResultMetadata.setAggregations(new AggregationMetadataArray(aggregationMetadataList)); + if (Boolean.FALSE.equals(searchFlags.isSkipAggregates())) { + final List aggregationMetadataList = + aggregationQueryBuilder.extractAggregationMetadata(searchResponse, filter); + searchResultMetadata.setAggregations(new AggregationMetadataArray(aggregationMetadataList)); + } final List searchSuggestions = extractSearchSuggestions(searchResponse); searchResultMetadata.setSuggestions(new SearchSuggestionArray(searchSuggestions)); diff --git 
a/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java b/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java index 75c3d23d26c66..a291b27ebebef 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java @@ -2,6 +2,7 @@ import static com.linkedin.metadata.Constants.*; import static com.linkedin.metadata.models.StructuredPropertyUtils.sanitizeStructuredPropertyFQN; +import static com.linkedin.metadata.models.annotation.SearchableAnnotation.OBJECT_FIELD_TYPES; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; @@ -26,6 +27,7 @@ import com.linkedin.structured.StructuredPropertyValueAssignment; import java.net.URISyntaxException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Optional; @@ -181,7 +183,7 @@ public void setSearchableValue( return; } - if (isArray || (valueType == DataSchema.Type.MAP && fieldType != FieldType.OBJECT)) { + if (isArray || (valueType == DataSchema.Type.MAP && !OBJECT_FIELD_TYPES.contains(fieldType))) { if (fieldType == FieldType.BROWSE_PATH_V2) { String browsePathV2Value = getBrowsePathV2Value(fieldValues); searchDocument.set(fieldName, JsonNodeFactory.instance.textNode(browsePathV2Value)); @@ -193,6 +195,25 @@ public void setSearchableValue( value -> getNodeForValue(valueType, value, fieldType).ifPresent(arrayNode::add)); searchDocument.set(fieldName, arrayNode); } + } else if (valueType == DataSchema.Type.MAP && FieldType.MAP_ARRAY.equals(fieldType)) { + ObjectNode dictDoc = JsonNodeFactory.instance.objectNode(); + fieldValues + .subList(0, Math.min(fieldValues.size(), maxObjectKeys)) + .forEach( + fieldValue -> { + String[] keyValues = fieldValue.toString().split("="); + String 
key = keyValues[0]; + ArrayNode values = JsonNodeFactory.instance.arrayNode(); + Arrays.stream(keyValues[1].substring(1, keyValues[1].length() - 1).split(", ")) + .forEach( + v -> { + if (!v.isEmpty()) { + values.add(v); + } + }); + dictDoc.set(key, values); + }); + searchDocument.set(fieldName, dictDoc); } else if (valueType == DataSchema.Type.MAP) { ObjectNode dictDoc = JsonNodeFactory.instance.objectNode(); fieldValues diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESAccessControlUtil.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESAccessControlUtil.java new file mode 100644 index 0000000000000..cf3eabb9c5a82 --- /dev/null +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESAccessControlUtil.java @@ -0,0 +1,260 @@ +package com.linkedin.metadata.search.utils; + +import static com.linkedin.metadata.authorization.PoliciesConfig.VIEW_ENTITY_PRIVILEGES; +import static com.linkedin.metadata.utils.SearchUtil.ES_INDEX_FIELD; +import static com.linkedin.metadata.utils.SearchUtil.KEYWORD_SUFFIX; + +import com.datahub.authorization.AuthUtil; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.datahub.authorization.EntitySpec; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.StringArray; +import com.linkedin.metadata.aspect.hooks.OwnerTypeMap; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.timeseries.elastic.indexbuilder.MappingsBuilder; +import com.linkedin.policy.DataHubActorFilter; +import com.linkedin.policy.DataHubPolicyInfo; +import com.linkedin.policy.PolicyMatchCriterion; +import com.linkedin.policy.PolicyMatchCriterionArray; +import 
io.datahubproject.metadata.context.ActorContext; +import io.datahubproject.metadata.context.OperationContext; +import java.util.Collection; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.index.query.TermsQueryBuilder; + +@Slf4j +public class ESAccessControlUtil { + private ESAccessControlUtil() {} + + private static final String OWNER_TYPES_FIELD = "ownerTypes"; + private static final QueryBuilder MATCH_ALL = QueryBuilders.matchAllQuery(); + + /** + * Given the OperationContext produce a filter for search results + * + * @param opContext the OperationContext of the search + * @return + */ + public static Optional buildAccessControlFilters( + @Nonnull OperationContext opContext) { + Optional response = Optional.empty(); + + /* + If search authorization is enabled AND we're also not the system performing the query + */ + if (opContext.getOperationContextConfig().getSearchAuthorizationConfiguration().isEnabled() + && !opContext.isSystemAuth() + && !opContext.getSearchContext().isRestrictedSearch()) { + + BoolQueryBuilder builder = QueryBuilders.boolQuery(); + + // Apply access policies + streamViewQueries(opContext).distinct().forEach(builder::should); + + if (builder.should().isEmpty()) { + // default no filters + return Optional.of(builder.mustNot(MATCH_ALL)); + } else if (!builder.should().contains(MATCH_ALL)) { + // if MATCH_ALL is not present, apply filters requiring at least 1 + builder.minimumShouldMatch(1); + response = Optional.of(builder); + } + } + + // MATCH_ALL filter present or system user or disabled + return response; + } + + /** + * Given an 
OperationContext and SearchResult, mark the restricted entities. Currently, the entire + * entity is marked as restricted using the key aspect name + * + * @param searchResult restricted search result + */ + public static void restrictSearchResult( + @Nonnull OperationContext opContext, @Nonnull SearchResult searchResult) { + restrictSearchResult(opContext, searchResult.getEntities()); + } + + public static > T restrictSearchResult( + @Nonnull OperationContext opContext, T searchEntities) { + if (opContext.getOperationContextConfig().getSearchAuthorizationConfiguration().isEnabled() + && opContext.getSearchContext().isRestrictedSearch()) { + final EntityRegistry entityRegistry = opContext.getEntityRegistry(); + final String actorUrnStr = + opContext.getSessionActorContext().getAuthentication().getActor().toUrnStr(); + final DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of(new ConjunctivePrivilegeGroup(VIEW_ENTITY_PRIVILEGES))); + + for (SearchEntity searchEntity : searchEntities) { + final String entityType = searchEntity.getEntity().getEntityType(); + final Optional resourceSpec = + Optional.of(new EntitySpec(entityType, searchEntity.getEntity().toString())); + if (!AuthUtil.isAuthorized( + opContext.getAuthorizerContext().getAuthorizer(), actorUrnStr, resourceSpec, orGroup)) { + final String keyAspectName = + entityRegistry.getEntitySpecs().get(entityType.toLowerCase()).getKeyAspectName(); + searchEntity.setRestrictedAspects(new StringArray(List.of(keyAspectName))); + } + } + } + return searchEntities; + } + + private static final Function activeMetadataViewEntityPolicyFilter = + policy -> + policy.getPrivileges() != null + && PoliciesConfig.ACTIVE_POLICY_STATE.equals(policy.getState()) + && PoliciesConfig.METADATA_POLICY_TYPE.equals(policy.getType()) + && VIEW_ENTITY_PRIVILEGES.stream() + .anyMatch(priv -> policy.getPrivileges().contains(priv)); + + private static Stream streamViewQueries(OperationContext opContext) { + 
return opContext.getSessionActorContext().getPolicyInfoSet().stream() + .filter(activeMetadataViewEntityPolicyFilter::apply) + .map( + policy -> { + // Build actor query + QueryBuilder actorQuery = buildActorQuery(opContext, policy); + + if (!policy.hasResources()) { + // no resource restrictions + return actorQuery; + } else { + + // No filters or criteria + if (!policy.getResources().hasFilter() + || !policy.getResources().getFilter().hasCriteria()) { + return null; + } + + PolicyMatchCriterionArray criteriaArray = + policy.getResources().getFilter().getCriteria(); + // Cannot apply policy if we can't map every field + if (!criteriaArray.stream().allMatch(criteria -> toESField(criteria).isPresent())) { + return null; + } + + BoolQueryBuilder resourceQuery = QueryBuilders.boolQuery(); + // apply actor filter if present + if (!MATCH_ALL.equals(actorQuery)) { + resourceQuery.filter(actorQuery); + } + // add resource query + buildResourceQuery(opContext, criteriaArray).forEach(resourceQuery::filter); + return resourceQuery; + } + }) + .filter(Objects::nonNull); + } + + /** + * Build an entity index query for ownership policies. 
If no restrictions, returns MATCH_ALL query + * + * @param opContext context + * @param policy policy + * @return filter query + */ + private static QueryBuilder buildActorQuery( + OperationContext opContext, DataHubPolicyInfo policy) { + DataHubActorFilter actorFilter = policy.getActors(); + + if (!policy.hasActors() + || !(actorFilter.isResourceOwners() || actorFilter.hasResourceOwnersTypes())) { + // no owner restriction + return MATCH_ALL; + } + + ActorContext actorContext = opContext.getSessionActorContext(); + + // policy might apply to the actor via user or group + List actorAndGroupUrns = + Stream.concat( + Stream.of(actorContext.getAuthentication().getActor().toUrnStr()), + actorContext.getGroupMembership().stream().map(Urn::toString)) + .map(String::toLowerCase) + .distinct() + .collect(Collectors.toList()); + + if (!actorFilter.hasResourceOwnersTypes()) { + // owners without owner type restrictions + return QueryBuilders.termsQuery( + ESUtils.toKeywordField(MappingsBuilder.OWNERS_FIELD, false), actorAndGroupUrns); + } else { + // owners with type restrictions + BoolQueryBuilder orQuery = QueryBuilders.boolQuery(); + orQuery.minimumShouldMatch(1); + + Set typeFields = + actorFilter.getResourceOwnersTypes().stream() + .map( + typeUrn -> + String.format( + "%s.%s%s", + OWNER_TYPES_FIELD, + OwnerTypeMap.encodeFieldName(typeUrn.toString()), + KEYWORD_SUFFIX)) + .collect(Collectors.toSet()); + + typeFields.forEach( + field -> orQuery.should(QueryBuilders.termsQuery(field, actorAndGroupUrns))); + + return orQuery; + } + } + + private static Stream buildResourceQuery( + OperationContext opContext, PolicyMatchCriterionArray criteriaArray) { + return criteriaArray.stream() + .map( + criteria -> + QueryBuilders.termsQuery( + toESField(criteria).get(), toESValues(opContext, criteria))); + } + + private static Optional toESField(PolicyMatchCriterion criterion) { + switch (criterion.getField()) { + case "TYPE": + return Optional.of(ES_INDEX_FIELD); + case "URN": + 
return Optional.of(ESUtils.toKeywordField(MappingsBuilder.URN_FIELD, false)); + case "TAG": + return Optional.of(ESUtils.toKeywordField(MappingsBuilder.TAGS_FIELD, false)); + case "DOMAIN": + return Optional.of(ESUtils.toKeywordField(MappingsBuilder.DOMAINS_FIELD, false)); + default: + return Optional.empty(); + } + } + + private static Collection toESValues( + OperationContext opContext, PolicyMatchCriterion criterion) { + switch (criterion.getField()) { + case "TYPE": + return criterion.getValues().stream() + .map( + value -> + opContext.getSearchContext().getIndexConvention().getEntityIndexName(value)) + .collect(Collectors.toSet()); + default: + return criterion.getValues(); + } + } +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java index 5ca5087d5ac35..6c4507216482f 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java @@ -1,6 +1,7 @@ package com.linkedin.metadata.search.utils; import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.models.annotation.SearchableAnnotation.OBJECT_FIELD_TYPES; import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.KEYWORD_FIELDS; import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.PATH_HIERARCHY_FIELDS; import static com.linkedin.metadata.search.utils.SearchUtils.isUrn; @@ -11,11 +12,13 @@ import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.StructuredPropertyUtils; import com.linkedin.metadata.models.annotation.SearchableAnnotation; +import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.Criterion; import 
com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; +import io.datahubproject.metadata.context.OperationContext; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -54,6 +57,7 @@ public class ESUtils { public static final int MAX_RESULT_SIZE = 10000; public static final String OPAQUE_ID_HEADER = "X-Opaque-Id"; public static final String HEADER_VALUE_DELIMITER = "|"; + private static final String REMOVED = "removed"; // Field types public static final String KEYWORD_FIELD_TYPE = "keyword"; @@ -149,6 +153,8 @@ public static BoolQueryBuilder buildFilterQuery( or -> finalQueryBuilder.should( ESUtils.buildConjunctiveFilterQuery(or, isTimeseries, searchableFieldTypes))); + // The default is not always 1 (ensure consistent default) + finalQueryBuilder.minimumShouldMatch(1); } else if (filter.getCriteria() != null) { // Otherwise, build boolean query from the deprecated "criteria" field. log.warn("Received query Filter with a deprecated field 'criteria'. 
Use 'or' instead."); @@ -165,6 +171,8 @@ public static BoolQueryBuilder buildFilterQuery( } }); finalQueryBuilder.should(andQueryBuilder); + // The default is not always 1 (ensure consistent default) + finalQueryBuilder.minimumShouldMatch(1); } return finalQueryBuilder; } @@ -263,7 +271,7 @@ public static String getElasticTypeForFieldType(SearchableAnnotation.FieldType f return LONG_FIELD_TYPE; } else if (fieldType == SearchableAnnotation.FieldType.DATETIME) { return DATE_FIELD_TYPE; - } else if (fieldType == SearchableAnnotation.FieldType.OBJECT) { + } else if (OBJECT_FIELD_TYPES.contains(fieldType)) { return OBJECT_FIELD_TYPE; } else if (fieldType == SearchableAnnotation.FieldType.DOUBLE) { return DOUBLE_FIELD_TYPE; @@ -666,4 +674,43 @@ private static QueryBuilder buildEqualsFromCriterionWithValue( .analyzer(KEYWORD_ANALYZER))); return filters; } + + @Nonnull + public static BoolQueryBuilder applyDefaultSearchFilters( + @Nonnull OperationContext opContext, + @Nullable Filter filter, + @Nonnull BoolQueryBuilder filterQuery) { + // filter soft deleted entities by default + filterSoftDeletedByDefault(filter, filterQuery, opContext.getSearchContext().getSearchFlags()); + // filter based on access controls + ESAccessControlUtil.buildAccessControlFilters(opContext).ifPresent(filterQuery::filter); + return filterQuery; + } + + /** + * Applies a default filter to remove entities that are soft deleted only if there isn't a filter + * for the REMOVED field already and soft delete entities are not being requested via search flags + */ + private static void filterSoftDeletedByDefault( + @Nullable Filter filter, + @Nonnull BoolQueryBuilder filterQuery, + @Nonnull SearchFlags searchFlags) { + if (Boolean.FALSE.equals(searchFlags.isIncludeSoftDeleted())) { + boolean removedInOrFilter = false; + if (filter != null) { + removedInOrFilter = + filter.getOr().stream() + .anyMatch( + or -> + or.getAnd().stream() + .anyMatch( + criterion -> + criterion.getField().equals(REMOVED) + || 
criterion.getField().equals(REMOVED + KEYWORD_SUFFIX))); + } + if (!removedInOrFilter) { + filterQuery.mustNot(QueryBuilders.matchQuery(REMOVED, true)); + } + } + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java index 13ccfd7f972af..3ddc004dd9fa9 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java @@ -198,6 +198,14 @@ public static SearchFlags applyDefaultSearchFlags( if (!finalSearchFlags.hasSkipCache() || finalSearchFlags.isSkipCache() == null) { finalSearchFlags.setSkipCache(defaultFlags.isSkipCache()); } + if (!finalSearchFlags.hasIncludeSoftDeleted() + || finalSearchFlags.isIncludeSoftDeleted() == null) { + finalSearchFlags.setIncludeSoftDeleted(defaultFlags.isIncludeSoftDeleted()); + } + if (!finalSearchFlags.hasIncludeRestricted() + || finalSearchFlags.isIncludeRestricted() == null) { + finalSearchFlags.setIncludeRestricted(defaultFlags.isIncludeRestricted()); + } if ((!finalSearchFlags.hasGroupingSpec() || finalSearchFlags.getGroupingSpec() == null) && (defaultFlags.getGroupingSpec() != null)) { finalSearchFlags.setGroupingSpec(defaultFlags.getGroupingSpec()); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java index 5bb523c8a8c1e..b2f1571d99407 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java @@ -11,6 +11,9 @@ public class MappingsBuilder { public static final String URN_FIELD = "urn"; + public static final String TAGS_FIELD = "tags"; + public static final String OWNERS_FIELD = 
"owners"; + public static final String DOMAINS_FIELD = "domains"; public static final String MESSAGE_ID_FIELD = "messageId"; public static final String TIMESTAMP_FIELD = "@timestamp"; public static final String TIMESTAMP_MILLIS_FIELD = "timestampMillis"; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java b/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java index 5a4443904e260..3ae2e52fe7208 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java @@ -16,6 +16,7 @@ import com.linkedin.metadata.service.RollbackService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.utils.metrics.MetricUtils; +import io.datahubproject.metadata.context.OperationContext; import java.util.function.Supplier; import org.mockito.MockedStatic; import org.testng.annotations.AfterMethod; @@ -35,6 +36,7 @@ public class JavaEntityClientTest { private MockedStatic _metricUtils; private RollbackService rollbackService; private Counter _counter; + private OperationContext opContext; @BeforeMethod public void setupTest() { @@ -50,6 +52,7 @@ public void setupTest() { _metricUtils = mockStatic(MetricUtils.class); _counter = mock(Counter.class); when(MetricUtils.counter(any(), any())).thenReturn(_counter); + opContext = mock(OperationContext.class); } @AfterMethod @@ -59,6 +62,7 @@ public void closeTest() { private JavaEntityClient getJavaEntityClient() { return new JavaEntityClient( + opContext, _entityService, _deleteEntityService, _entitySearchService, diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java index 7f0e4294e0cbf..71f247ebfc29a 100644 --- 
a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java @@ -54,7 +54,7 @@ public abstract class SearchGraphServiceTestBase extends GraphServiceTestBase { @Nonnull protected abstract ESIndexBuilder getIndexBuilder(); - private final IndexConvention _indexConvention = new IndexConventionImpl(null); + private final IndexConvention _indexConvention = IndexConventionImpl.NO_PREFIX; private final String _indexName = _indexConvention.getIndexName(INDEX_NAME); private ElasticSearchGraphService _client; private boolean _enableMultiPathSearch = diff --git a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java index c0faf6fdfee6c..f117c42572bd5 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java @@ -1,14 +1,17 @@ package com.linkedin.metadata.recommendation; +import static org.mockito.Mockito.mock; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertTrue; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.TestEntityUtil; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.recommendation.candidatesource.TestSource; import com.linkedin.metadata.recommendation.ranker.RecommendationModuleRanker; import com.linkedin.metadata.recommendation.ranker.SimpleRecommendationRanker; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.net.URISyntaxException; import java.util.List; import java.util.stream.Collectors; @@ -79,7 +82,8 @@ public void testService() throws URISyntaxException { new 
RecommendationsService(ImmutableList.of(nonEligibleSource, emptySource), ranker); List result = service.listRecommendations( - Urn.createFromString("urn:li:corpuser:me"), + TestOperationContexts.userContextNoSearchAuthorization( + mock(EntityRegistry.class), Urn.createFromString("urn:li:corpuser:me")), new RecommendationRequestContext().setScenario(ScenarioType.HOME), 10); assertTrue(result.isEmpty()); @@ -90,7 +94,8 @@ public void testService() throws URISyntaxException { ImmutableList.of(nonEligibleSource, emptySource, valuesSource), ranker); result = service.listRecommendations( - Urn.createFromString("urn:li:corpuser:me"), + TestOperationContexts.userContextNoSearchAuthorization( + mock(EntityRegistry.class), Urn.createFromString("urn:li:corpuser:me")), new RecommendationRequestContext().setScenario(ScenarioType.HOME), 10); assertEquals(result.size(), 1); @@ -106,7 +111,8 @@ public void testService() throws URISyntaxException { ImmutableList.of(valuesSource, multiValuesSource, urnsSource, multiUrnsSource), ranker); result = service.listRecommendations( - Urn.createFromString("urn:li:corpuser:me"), + TestOperationContexts.userContextNoSearchAuthorization( + mock(EntityRegistry.class), Urn.createFromString("urn:li:corpuser:me")), new RecommendationRequestContext().setScenario(ScenarioType.HOME), 10); assertEquals(result.size(), 4); @@ -134,7 +140,8 @@ public void testService() throws URISyntaxException { // Test limit result = service.listRecommendations( - Urn.createFromString("urn:li:corpuser:me"), + TestOperationContexts.userContextNoSearchAuthorization( + mock(EntityRegistry.class), Urn.createFromString("urn:li:corpuser:me")), new RecommendationRequestContext().setScenario(ScenarioType.HOME), 2); assertEquals(result.size(), 2); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java 
b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java index 2d60f3202b69f..eb616ee15a292 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java @@ -3,6 +3,7 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertNotNull; @@ -20,6 +21,8 @@ import com.linkedin.metadata.recommendation.RecommendationRequestContext; import com.linkedin.metadata.recommendation.ScenarioType; import com.linkedin.metadata.search.EntitySearchService; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.Collections; import java.util.List; import javax.annotation.Nonnull; @@ -30,10 +33,11 @@ import org.testng.annotations.Test; public class EntitySearchAggregationCandidateSourceTest { - private EntitySearchService _entitySearchService = Mockito.mock(EntitySearchService.class); - private EntityRegistry entityRegistry = Mockito.mock(EntityRegistry.class); + private EntitySearchService _entitySearchService = mock(EntitySearchService.class); + private EntityRegistry entityRegistry = mock(EntityRegistry.class); private EntitySearchAggregationSource _valueBasedCandidateSource; private EntitySearchAggregationSource _urnBasedCandidateSource; + private OperationContext opContext; private static final Urn USER = new CorpuserUrn("test"); private static final RecommendationRequestContext CONTEXT = @@ -41,6 +45,7 @@ public class 
EntitySearchAggregationCandidateSourceTest { @BeforeMethod public void setup() { + opContext = TestOperationContexts.userContextNoSearchAuthorization(entityRegistry, USER); Mockito.reset(_entitySearchService); _valueBasedCandidateSource = buildCandidateSource("testValue", false); _urnBasedCandidateSource = buildCandidateSource("testUrn", true); @@ -81,7 +86,8 @@ public RecommendationRenderType getRenderType() { @Override public boolean isEligible( - @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + @Nonnull OperationContext opContext, + @Nonnull RecommendationRequestContext requestContext) { return true; } }; @@ -90,21 +96,24 @@ public boolean isEligible( @Test public void testWhenSearchServiceReturnsEmpty() { Mockito.when( - _entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) + _entitySearchService.aggregateByValue( + any(OperationContext.class), eq(null), eq("testValue"), eq(null), anyInt())) .thenReturn(Collections.emptyMap()); List candidates = - _valueBasedCandidateSource.getRecommendations(USER, CONTEXT); + _valueBasedCandidateSource.getRecommendations(opContext, CONTEXT); assertTrue(candidates.isEmpty()); - assertFalse(_valueBasedCandidateSource.getRecommendationModule(USER, CONTEXT).isPresent()); + assertFalse(_valueBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent()); } @Test public void testWhenSearchServiceReturnsValueResults() { // One result - Mockito.when(_entitySearchService.aggregateByValue(any(), eq("testValue"), eq(null), anyInt())) + Mockito.when( + _entitySearchService.aggregateByValue( + any(OperationContext.class), any(), eq("testValue"), eq(null), anyInt())) .thenReturn(ImmutableMap.of("value1", 1L)); List candidates = - _valueBasedCandidateSource.getRecommendations(USER, CONTEXT); + _valueBasedCandidateSource.getRecommendations(opContext, CONTEXT); assertEquals(candidates.size(), 1); RecommendationContent content = candidates.get(0); 
assertEquals(content.getValue(), "value1"); @@ -119,12 +128,14 @@ public void testWhenSearchServiceReturnsValueResults() { new Criterion().setField("testValue").setValue("value1")); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 1L); - assertTrue(_valueBasedCandidateSource.getRecommendationModule(USER, CONTEXT).isPresent()); + assertTrue(_valueBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent()); // Multiple result - Mockito.when(_entitySearchService.aggregateByValue(any(), eq("testValue"), eq(null), anyInt())) + Mockito.when( + _entitySearchService.aggregateByValue( + any(OperationContext.class), any(), eq("testValue"), eq(null), anyInt())) .thenReturn(ImmutableMap.of("value1", 1L, "value2", 2L, "value3", 3L)); - candidates = _valueBasedCandidateSource.getRecommendations(USER, CONTEXT); + candidates = _valueBasedCandidateSource.getRecommendations(opContext, CONTEXT); assertEquals(candidates.size(), 2); content = candidates.get(0); assertEquals(content.getValue(), "value3"); @@ -152,7 +163,7 @@ public void testWhenSearchServiceReturnsValueResults() { new Criterion().setField("testValue").setValue("value2")); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 2L); - assertTrue(_valueBasedCandidateSource.getRecommendationModule(USER, CONTEXT).isPresent()); + assertTrue(_valueBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent()); } @Test @@ -161,10 +172,12 @@ public void testWhenSearchServiceReturnsUrnResults() { Urn testUrn1 = new TestEntityUrn("testUrn1", "testUrn1", "testUrn1"); Urn testUrn2 = new TestEntityUrn("testUrn2", "testUrn2", "testUrn2"); Urn testUrn3 = new TestEntityUrn("testUrn3", "testUrn3", "testUrn3"); - Mockito.when(_entitySearchService.aggregateByValue(any(), eq("testUrn"), eq(null), anyInt())) + Mockito.when( + _entitySearchService.aggregateByValue( + any(OperationContext.class), any(), 
eq("testUrn"), eq(null), anyInt())) .thenReturn(ImmutableMap.of(testUrn1.toString(), 1L)); List candidates = - _urnBasedCandidateSource.getRecommendations(USER, CONTEXT); + _urnBasedCandidateSource.getRecommendations(opContext, CONTEXT); assertEquals(candidates.size(), 1); RecommendationContent content = candidates.get(0); assertEquals(content.getValue(), testUrn1.toString()); @@ -179,14 +192,16 @@ public void testWhenSearchServiceReturnsUrnResults() { new Criterion().setField("testUrn").setValue(testUrn1.toString())); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 1L); - assertTrue(_urnBasedCandidateSource.getRecommendationModule(USER, CONTEXT).isPresent()); + assertTrue(_urnBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent()); // Multiple result - Mockito.when(_entitySearchService.aggregateByValue(any(), eq("testUrn"), eq(null), anyInt())) + Mockito.when( + _entitySearchService.aggregateByValue( + any(OperationContext.class), any(), eq("testUrn"), eq(null), anyInt())) .thenReturn( ImmutableMap.of( testUrn1.toString(), 1L, testUrn2.toString(), 2L, testUrn3.toString(), 3L)); - candidates = _urnBasedCandidateSource.getRecommendations(USER, CONTEXT); + candidates = _urnBasedCandidateSource.getRecommendations(opContext, CONTEXT); assertEquals(candidates.size(), 2); content = candidates.get(0); assertEquals(content.getValue(), testUrn3.toString()); @@ -214,6 +229,6 @@ public void testWhenSearchServiceReturnsUrnResults() { new Criterion().setField("testUrn").setValue(testUrn2.toString())); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 2L); - assertTrue(_urnBasedCandidateSource.getRecommendationModule(USER, CONTEXT).isPresent()); + assertTrue(_urnBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent()); } } diff --git 
a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java index 3998e45195b25..51b4ecf1410a1 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java @@ -1,9 +1,14 @@ package com.linkedin.metadata.recommendation.candidatesource; +import static org.mockito.Mockito.mock; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.models.registry.EntityRegistry; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.Collections; import org.junit.Assert; import org.testng.annotations.Test; @@ -13,13 +18,17 @@ public class RecommendationUtilsTest { @Test private void testIsSupportedEntityType() { Urn testUrn = UrnUtils.getUrn("urn:li:corpuser:john"); + OperationContext opContext = + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class), testUrn); + Assert.assertTrue( RecommendationUtils.isSupportedEntityType( - testUrn, + opContext, ImmutableSet.of(Constants.DATASET_ENTITY_NAME, Constants.CORP_USER_ENTITY_NAME))); Assert.assertFalse( RecommendationUtils.isSupportedEntityType( - testUrn, ImmutableSet.of(Constants.DATASET_ENTITY_NAME))); - Assert.assertFalse(RecommendationUtils.isSupportedEntityType(testUrn, Collections.emptySet())); + opContext, ImmutableSet.of(Constants.DATASET_ENTITY_NAME))); + Assert.assertFalse( + RecommendationUtils.isSupportedEntityType(opContext, Collections.emptySet())); } } diff --git 
a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/TestSource.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/TestSource.java index 666deb2c419d7..4350cdd2662a8 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/TestSource.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/TestSource.java @@ -1,9 +1,9 @@ package com.linkedin.metadata.recommendation.candidatesource; -import com.linkedin.common.urn.Urn; import com.linkedin.metadata.recommendation.RecommendationContent; import com.linkedin.metadata.recommendation.RecommendationRenderType; import com.linkedin.metadata.recommendation.RecommendationRequestContext; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; import javax.annotation.Nonnull; import lombok.Getter; @@ -36,13 +36,13 @@ public RecommendationRenderType getRenderType() { @Override public boolean isEligible( - @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + @Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) { return eligible; } @Override public List getRecommendations( - @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + @Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) { return contents; } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchResultCacheKeyTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchResultCacheKeyTest.java index 57fa51ffbdd90..3c03f0b201aef 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchResultCacheKeyTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchResultCacheKeyTest.java @@ -13,8 +13,8 @@ public class LineageSearchResultCacheKeyTest extends AbstractTestNGSpringContext public void 
testNulls() { // ensure no NPE assertEquals( - new EntityLineageResultCacheKey(null, null, null, null, null, ChronoUnit.DAYS), - new EntityLineageResultCacheKey(null, null, null, null, null, ChronoUnit.DAYS)); + new EntityLineageResultCacheKey("", null, null, null, null, null, ChronoUnit.DAYS), + new EntityLineageResultCacheKey("", null, null, null, null, null, ChronoUnit.DAYS)); } @Test @@ -22,13 +22,13 @@ public void testDateTruncation() { // expect start of day milli assertEquals( new EntityLineageResultCacheKey( - null, null, 1679529600000L, 1679615999999L, null, ChronoUnit.DAYS), + "", null, null, 1679529600000L, 1679615999999L, null, ChronoUnit.DAYS), new EntityLineageResultCacheKey( - null, null, 1679530293000L, 1679530293001L, null, ChronoUnit.DAYS)); + "", null, null, 1679530293000L, 1679530293001L, null, ChronoUnit.DAYS)); assertNotSame( new EntityLineageResultCacheKey( - null, null, 1679529600000L, 1679616000000L, null, ChronoUnit.DAYS), + "", null, null, 1679529600000L, 1679616000000L, null, ChronoUnit.DAYS), new EntityLineageResultCacheKey( - null, null, 1679530293000L, 1679530293001L, null, ChronoUnit.DAYS)); + "", null, null, 1679530293000L, 1679530293001L, null, ChronoUnit.DAYS)); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java index 9588140bebd65..52f91fb1b8c28 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java @@ -17,6 +17,7 @@ import static org.testng.Assert.assertNull; import static org.testng.Assert.assertTrue; +import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.test.Snapshot; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -42,7 +43,6 @@ import 
com.linkedin.metadata.graph.LineageRelationship; import com.linkedin.metadata.graph.LineageRelationshipArray; import com.linkedin.metadata.models.registry.SnapshotEntityRegistry; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -64,6 +64,8 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import com.linkedin.r2.RemoteInvocationException; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collections; @@ -74,6 +76,7 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; +import lombok.Getter; import org.junit.Assert; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; @@ -112,6 +115,8 @@ public abstract class LineageServiceTestBase extends AbstractTestNGSpringContext private LineageSearchService lineageSearchService; private RestHighLevelClient searchClientSpy; + @Getter private OperationContext operationContext; + private static final String ENTITY_NAME = "testEntity"; private static final Urn TEST_URN = TestEntityUtil.getTestEntityUrn(); private static final String TEST = "test"; @@ -133,6 +138,10 @@ public void setup() throws RemoteInvocationException, URISyntaxException { .thenReturn(new SnapshotEntityRegistry(new Snapshot())); when(aspectRetriever.getLatestAspectObjects(any(), any())).thenReturn(Map.of()); indexConvention = new IndexConventionImpl("lineage_search_service_test"); + operationContext = + TestOperationContexts.systemContextNoSearchAuthorization( + aspectRetriever.getEntityRegistry(), indexConvention) + .asSession(Authorizer.EMPTY, 
TestOperationContexts.TEST_USER_AUTH); settingsBuilder = new SettingsBuilder(null); elasticSearchService = buildEntitySearchService(); elasticSearchService.configure(); @@ -368,6 +377,7 @@ public void testSearchService() throws Exception { searchResult = lineageSearchService.searchAcrossLineage( + getOperationContext().withSearchFlags(flags -> flags.setSkipCache(true)), TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), @@ -378,8 +388,7 @@ public void testSearchService() throws Exception { 0, 10, null, - null, - new SearchFlags().setSkipCache(false)); + null); assertEquals(searchResult.getNumEntities().intValue(), 1); Mockito.verify(graphService, times(1)) @@ -395,6 +404,7 @@ public void testSearchService() throws Exception { // Hit the cache on second attempt searchResult = lineageSearchService.searchAcrossLineage( + getOperationContext().withSearchFlags(flags -> flags.setSkipCache(false)), TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), @@ -405,8 +415,7 @@ public void testSearchService() throws Exception { 0, 10, null, - null, - new SearchFlags().setSkipCache(false)); + null); assertEquals(searchResult.getNumEntities().intValue(), 1); Mockito.verify(graphService, times(1)) .getLineage( @@ -434,6 +443,7 @@ public void testSearchService() throws Exception { searchResult = lineageSearchService.searchAcrossLineage( + getOperationContext().withSearchFlags(flags -> flags.setSkipCache(false)), TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(), @@ -444,8 +454,7 @@ public void testSearchService() throws Exception { 0, 10, 0L, - 1L, - new SearchFlags().setSkipCache(false)); + 1L); assertEquals(searchResult.getNumEntities().intValue(), 1); Mockito.verify(graphService, times(1)) @@ -461,6 +470,7 @@ public void testSearchService() throws Exception { // Hit the cache on second attempt searchResult = lineageSearchService.searchAcrossLineage( + getOperationContext().withSearchFlags(flags -> flags.setSkipCache(false)), TEST_URN, 
LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), @@ -471,8 +481,7 @@ public void testSearchService() throws Exception { 0, 10, 0L, - 1L, - new SearchFlags().setSkipCache(false)); + 1L); assertEquals(searchResult.getNumEntities().intValue(), 1); Mockito.verify(graphService, times(1)) .getLineage( @@ -764,6 +773,7 @@ public void testLightningSearchService() throws Exception { searchResult = lineageSearchService.searchAcrossLineage( + getOperationContext().withSearchFlags(flags -> flags.setSkipCache(false)), TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), @@ -774,8 +784,7 @@ public void testLightningSearchService() throws Exception { 0, 10, null, - null, - new SearchFlags().setSkipCache(false)); + null); assertEquals(searchResult.getNumEntities().intValue(), 1); verify(graphService, times(1)) @@ -793,6 +802,7 @@ public void testLightningSearchService() throws Exception { // Hit the cache on second attempt searchResult = lineageSearchService.searchAcrossLineage( + getOperationContext().withSearchFlags(flags -> flags.setSkipCache(false)), TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), @@ -803,8 +813,7 @@ public void testLightningSearchService() throws Exception { 0, 10, null, - null, - new SearchFlags().setSkipCache(false)); + null); assertEquals(searchResult.getNumEntities().intValue(), 1); verify(graphService, times(1)) .getLineage( @@ -834,6 +843,7 @@ public void testLightningSearchService() throws Exception { searchResult = lineageSearchService.searchAcrossLineage( + getOperationContext().withSearchFlags(flags -> flags.setSkipCache(false)), TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(), @@ -844,8 +854,7 @@ public void testLightningSearchService() throws Exception { 0, 10, 0L, - 1L, - new SearchFlags().setSkipCache(false)); + 1L); assertEquals(searchResult.getNumEntities().intValue(), 1); verify(graphService, times(1)) @@ -863,6 +872,7 @@ public void testLightningSearchService() throws Exception { // 
Hit the cache on second attempt searchResult = lineageSearchService.searchAcrossLineage( + getOperationContext().withSearchFlags(flags -> flags.setSkipCache(false)), TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), @@ -873,8 +883,7 @@ public void testLightningSearchService() throws Exception { 0, 10, 0L, - 1L, - new SearchFlags().setSkipCache(false)); + 1L); assertEquals(searchResult.getNumEntities().intValue(), 1); verify(graphService, times(1)) .getLineage( @@ -896,6 +905,7 @@ public void testLightningSearchService() throws Exception { // Entity searchResult = lineageSearchService.searchAcrossLineage( + getOperationContext().withSearchFlags(flags -> flags.setSkipCache(false)), TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(DATASET_ENTITY_NAME), @@ -906,8 +916,7 @@ public void testLightningSearchService() throws Exception { 0, 10, null, - null, - new SearchFlags().setSkipCache(false)); + null); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); verify(lineageSearchService, times(1)) @@ -916,6 +925,7 @@ public void testLightningSearchService() throws Exception { // Cached searchResult = lineageSearchService.searchAcrossLineage( + getOperationContext().withSearchFlags(flags -> flags.setSkipCache(false)), TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(DATASET_ENTITY_NAME), @@ -926,8 +936,7 @@ public void testLightningSearchService() throws Exception { 0, 10, null, - null, - new SearchFlags().setSkipCache(false)); + null); Mockito.verify(graphService, times(1)) .getLineage( eq(TEST_URN), @@ -959,6 +968,7 @@ public void testLightningSearchService() throws Exception { searchResult = lineageSearchService.searchAcrossLineage( + getOperationContext().withSearchFlags(flags -> flags.setSkipCache(false)), TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), @@ -969,8 +979,7 @@ public void testLightningSearchService() throws Exception { 0, 10, null, - null, - new 
SearchFlags().setSkipCache(false)); + null); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); verify(lineageSearchService, times(3)) @@ -979,6 +988,7 @@ public void testLightningSearchService() throws Exception { // Cached searchResult = lineageSearchService.searchAcrossLineage( + getOperationContext().withSearchFlags(flags -> flags.setSkipCache(false)), TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), @@ -989,8 +999,7 @@ public void testLightningSearchService() throws Exception { 0, 10, null, - null, - new SearchFlags().setSkipCache(false)); + null); verify(graphService, times(1)) .getLineage( eq(TEST_URN), @@ -1009,6 +1018,7 @@ public void testLightningSearchService() throws Exception { Filter originFilter = QueryUtils.newFilter("origin", "PROD"); searchResult = lineageSearchService.searchAcrossLineage( + getOperationContext().withSearchFlags(flags -> flags.setSkipCache(false)), TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), @@ -1019,8 +1029,7 @@ public void testLightningSearchService() throws Exception { 0, 10, null, - null, - new SearchFlags().setSkipCache(false)); + null); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); verify(lineageSearchService, times(5)) @@ -1029,6 +1038,7 @@ public void testLightningSearchService() throws Exception { // Cached searchResult = lineageSearchService.searchAcrossLineage( + getOperationContext().withSearchFlags(flags -> flags.setSkipCache(false)), TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), @@ -1039,8 +1049,7 @@ public void testLightningSearchService() throws Exception { 0, 10, null, - null, - new SearchFlags().setSkipCache(false)); + null); verify(graphService, times(1)) .getLineage( eq(TEST_URN), @@ -1284,6 +1293,7 @@ private LineageRelationship constructLineageRelationship(Urn urn) { // Convenience method to reduce spots where we're 
sending the same params private LineageSearchResult searchAcrossLineage(@Nullable Filter filter, @Nullable String input) { return lineageSearchService.searchAcrossLineage( + getOperationContext().withSearchFlags(flags -> flags.setSkipCache(true)), TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(), @@ -1294,13 +1304,13 @@ private LineageSearchResult searchAcrossLineage(@Nullable Filter filter, @Nullab 0, 10, null, - null, - new SearchFlags().setSkipCache(true)); + null); } private LineageScrollResult scrollAcrossLineage( @Nullable Filter filter, @Nullable String input, String scrollId, int size) { return lineageSearchService.scrollAcrossLineage( + getOperationContext().withSearchFlags(flags -> flags.setSkipCache(true)), TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(), @@ -1312,8 +1322,7 @@ private LineageScrollResult scrollAcrossLineage( "5m", size, null, - null, - new SearchFlags().setSkipCache(true)); + null); } private LineageScrollResult scrollAcrossLineage(@Nullable Filter filter, @Nullable String input) { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java index d860776a31681..3b233ed8ad710 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java @@ -7,6 +7,7 @@ import static org.mockito.Mockito.when; import static org.testng.Assert.assertEquals; +import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.test.Snapshot; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -19,7 +20,6 @@ import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.models.registry.SnapshotEntityRegistry; -import 
com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -40,9 +40,12 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import com.linkedin.r2.RemoteInvocationException; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.net.URISyntaxException; import java.util.Map; import javax.annotation.Nonnull; +import lombok.Getter; import org.opensearch.client.RestHighLevelClient; import org.springframework.cache.CacheManager; import org.springframework.cache.concurrent.ConcurrentMapCacheManager; @@ -74,6 +77,7 @@ public abstract class SearchServiceTestBase extends AbstractTestNGSpringContextT private ElasticSearchService elasticSearchService; private CacheManager cacheManager; private SearchService searchService; + @Getter private OperationContext operationContext; private static final String ENTITY_NAME = "testEntity"; @@ -84,6 +88,10 @@ public void setup() throws RemoteInvocationException, URISyntaxException { .thenReturn(new SnapshotEntityRegistry(new Snapshot())); when(aspectRetriever.getLatestAspectObjects(any(), any())).thenReturn(Map.of()); indexConvention = new IndexConventionImpl("search_service_test"); + operationContext = + TestOperationContexts.systemContextNoSearchAuthorization( + aspectRetriever.getEntityRegistry(), indexConvention) + .asSession(Authorizer.EMPTY, TestOperationContexts.TEST_USER_AUTH); settingsBuilder = new SettingsBuilder(null); elasticSearchService = buildEntitySearchService(); elasticSearchService.configure(); @@ -156,17 +164,24 @@ private void clearCache() { public void testSearchService() throws Exception { SearchResult searchResult = searchService.searchAcrossEntities( + 
getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true).setSkipCache(true)), ImmutableList.of(ENTITY_NAME), "test", null, null, 0, - 10, - new SearchFlags().setFulltext(true).setSkipCache(true)); + 10); assertEquals(searchResult.getNumEntities().intValue(), 0); searchResult = searchService.searchAcrossEntities( - ImmutableList.of(), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); + getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + ImmutableList.of(), + "test", + null, + null, + 0, + 10); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(); @@ -181,7 +196,13 @@ public void testSearchService() throws Exception { searchResult = searchService.searchAcrossEntities( - ImmutableList.of(), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); + getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + ImmutableList.of(), + "test", + null, + null, + 0, + 10); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); clearCache(); @@ -197,12 +218,20 @@ public void testSearchService() throws Exception { searchResult = searchService.searchAcrossEntities( - ImmutableList.of(), "'test2'", null, null, 0, 10, new SearchFlags().setFulltext(true)); + getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + ImmutableList.of(), + "'test2'", + null, + null, + 0, + 10); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn2); clearCache(); - long docCount = elasticSearchService.docCount(ENTITY_NAME); + long docCount = + elasticSearchService.docCount( + getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), ENTITY_NAME); assertEquals(docCount, 2L); elasticSearchService.deleteDocument(ENTITY_NAME, urn.toString()); @@ -210,7 +239,13 @@ public void testSearchService() throws Exception { 
syncAfterWrite(getBulkProcessor()); searchResult = searchService.searchAcrossEntities( - ImmutableList.of(), "'test2'", null, null, 0, 10, new SearchFlags().setFulltext(true)); + getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + ImmutableList.of(), + "'test2'", + null, + null, + 0, + 10); assertEquals(searchResult.getNumEntities().intValue(), 0); } @@ -241,13 +276,13 @@ public void testAdvancedSearchOr() throws Exception { SearchResult searchResult = searchService.searchAcrossEntities( + getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), ImmutableList.of(ENTITY_NAME), "test", filterWithCondition, null, 0, - 10, - new SearchFlags().setFulltext(true)); + 10); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(); @@ -286,13 +321,13 @@ public void testAdvancedSearchOr() throws Exception { searchResult = searchService.searchAcrossEntities( + getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), ImmutableList.of(), "test", filterWithCondition, null, 0, - 10, - new SearchFlags().setFulltext(true)); + 10); assertEquals(searchResult.getNumEntities().intValue(), 2); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); assertEquals(searchResult.getEntities().get(1).getEntity(), urn2); @@ -326,13 +361,13 @@ public void testAdvancedSearchSoftDelete() throws Exception { SearchResult searchResult = searchService.searchAcrossEntities( + getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), ImmutableList.of(ENTITY_NAME), "test", filterWithCondition, null, 0, - 10, - new SearchFlags().setFulltext(true)); + 10); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(); @@ -374,13 +409,13 @@ public void testAdvancedSearchSoftDelete() throws Exception { searchResult = searchService.searchAcrossEntities( + getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), ImmutableList.of(), "test", filterWithCondition, null, 0, - 10, - new 
SearchFlags().setFulltext(true)); + 10); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); clearCache(); @@ -405,13 +440,13 @@ public void testAdvancedSearchNegated() throws Exception { SearchResult searchResult = searchService.searchAcrossEntities( + getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), ImmutableList.of(ENTITY_NAME), "test", filterWithCondition, null, 0, - 10, - new SearchFlags().setFulltext(true)); + 10); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(); @@ -453,13 +488,13 @@ public void testAdvancedSearchNegated() throws Exception { searchResult = searchService.searchAcrossEntities( + getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), ImmutableList.of(), "test", filterWithCondition, null, 0, - 10, - new SearchFlags().setFulltext(true)); + 10); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn3); clearCache(); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java index 40ccc8dfb5047..ae91f6a8876e9 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java @@ -14,7 +14,6 @@ import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.elasticsearch.ElasticSearchService; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders; @@ -25,6 +24,8 @@ import 
com.linkedin.metadata.search.elasticsearch.update.ESWriteDAO; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.List; import javax.annotation.Nonnull; import org.opensearch.client.RestHighLevelClient; @@ -60,12 +61,16 @@ public abstract class TestEntityTestBase extends AbstractTestNGSpringContextTest private IndexConvention indexConvention; private SettingsBuilder settingsBuilder; private ElasticSearchService elasticSearchService; + private OperationContext opContext; private static final String ENTITY_NAME = "testEntity"; @BeforeClass public void setup() { indexConvention = new IndexConventionImpl("es_service_test"); + opContext = + TestOperationContexts.systemContextNoSearchAuthorization( + aspectRetriever.getEntityRegistry(), indexConvention); settingsBuilder = new SettingsBuilder(null); elasticSearchService = buildService(); elasticSearchService.configure(); @@ -122,14 +127,35 @@ private ElasticSearchService buildService() { public void testElasticSearchServiceStructuredQuery() throws Exception { SearchResult searchResult = elasticSearchService.search( - List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(false)); + opContext.withSearchFlags(flags -> flags.setFulltext(false)), + List.of(ENTITY_NAME), + "test", + null, + null, + 0, + 10); assertEquals(searchResult.getNumEntities().intValue(), 0); - BrowseResult browseResult = elasticSearchService.browse(ENTITY_NAME, "", null, 0, 10); + BrowseResult browseResult = + elasticSearchService.browse( + opContext.withSearchFlags(flags -> flags.setFulltext(false)), + ENTITY_NAME, + "", + null, + 0, + 10); assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 0); - assertEquals(elasticSearchService.docCount(ENTITY_NAME), 0); + assertEquals( + 
elasticSearchService.docCount( + opContext.withSearchFlags(flags -> flags.setFulltext(false)), ENTITY_NAME), + 0); assertEquals( elasticSearchService - .aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10) + .aggregateByValue( + opContext.withSearchFlags(flags -> flags.setFulltext(false)), + ImmutableList.of(ENTITY_NAME), + "textField", + null, + 10) .size(), 0); @@ -145,30 +171,57 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { searchResult = elasticSearchService.search( - List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(false)); + opContext.withSearchFlags(flags -> flags.setFulltext(false)), + List.of(ENTITY_NAME), + "test", + null, + null, + 0, + 10); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); searchResult = elasticSearchService.search( + opContext.withSearchFlags(flags -> flags.setFulltext(false)), List.of(ENTITY_NAME), "foreignKey:Node", null, null, 0, - 10, - new SearchFlags().setFulltext(false)); + 10); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); - browseResult = elasticSearchService.browse(ENTITY_NAME, "", null, 0, 10); + browseResult = + elasticSearchService.browse( + opContext.withSearchFlags(flags -> flags.setFulltext(false)), + ENTITY_NAME, + "", + null, + 0, + 10); assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 1); assertEquals(browseResult.getGroups().get(0).getName(), "a"); - browseResult = elasticSearchService.browse(ENTITY_NAME, "/a", null, 0, 10); + browseResult = + elasticSearchService.browse( + opContext.withSearchFlags(flags -> flags.setFulltext(false)), + ENTITY_NAME, + "/a", + null, + 0, + 10); assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 1); assertEquals(browseResult.getGroups().get(0).getName(), "b"); - 
assertEquals(elasticSearchService.docCount(ENTITY_NAME), 1); + assertEquals( + elasticSearchService.docCount( + opContext.withSearchFlags(flags -> flags.setFulltext(false)), ENTITY_NAME), + 1); assertEquals( elasticSearchService.aggregateByValue( - ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), + opContext.withSearchFlags(flags -> flags.setFulltext(false)), + ImmutableList.of(ENTITY_NAME), + "textFieldOverride", + null, + 10), ImmutableMap.of("textFieldOverride", 1L)); Urn urn2 = new TestEntityUrn("test2", "urn2", "VALUE_2"); @@ -182,20 +235,47 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { searchResult = elasticSearchService.search( - List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(false)); + opContext.withSearchFlags(flags -> flags.setFulltext(false)), + List.of(ENTITY_NAME), + "test2", + null, + null, + 0, + 10); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn2); - browseResult = elasticSearchService.browse(ENTITY_NAME, "", null, 0, 10); + browseResult = + elasticSearchService.browse( + opContext.withSearchFlags(flags -> flags.setFulltext(false)), + ENTITY_NAME, + "", + null, + 0, + 10); assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 2); assertEquals(browseResult.getGroups().get(0).getName(), "a"); assertEquals(browseResult.getGroups().get(1).getName(), "b"); - browseResult = elasticSearchService.browse(ENTITY_NAME, "/a", null, 0, 10); + browseResult = + elasticSearchService.browse( + opContext.withSearchFlags(flags -> flags.setFulltext(false)), + ENTITY_NAME, + "/a", + null, + 0, + 10); assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 1); assertEquals(browseResult.getGroups().get(0).getName(), "b"); - assertEquals(elasticSearchService.docCount(ENTITY_NAME), 2); + assertEquals( + elasticSearchService.docCount( + opContext.withSearchFlags(flags -> 
flags.setFulltext(false)), ENTITY_NAME), + 2); assertEquals( elasticSearchService.aggregateByValue( - ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), + opContext.withSearchFlags(flags -> flags.setFulltext(false)), + ImmutableList.of(ENTITY_NAME), + "textFieldOverride", + null, + 10), ImmutableMap.of("textFieldOverride", 1L, "textFieldOverride2", 1L)); elasticSearchService.deleteDocument(ENTITY_NAME, urn.toString()); @@ -203,14 +283,35 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { syncAfterWrite(getBulkProcessor()); searchResult = elasticSearchService.search( - List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(false)); + opContext.withSearchFlags(flags -> flags.setFulltext(false)), + List.of(ENTITY_NAME), + "test2", + null, + null, + 0, + 10); assertEquals(searchResult.getNumEntities().intValue(), 0); - browseResult = elasticSearchService.browse(ENTITY_NAME, "", null, 0, 10); + browseResult = + elasticSearchService.browse( + opContext.withSearchFlags(flags -> flags.setFulltext(false)), + ENTITY_NAME, + "", + null, + 0, + 10); assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 0); - assertEquals(elasticSearchService.docCount(ENTITY_NAME), 0); + assertEquals( + elasticSearchService.docCount( + opContext.withSearchFlags(flags -> flags.setFulltext(false)), ENTITY_NAME), + 0); assertEquals( elasticSearchService - .aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10) + .aggregateByValue( + opContext.withSearchFlags(flags -> flags.setFulltext(false)), + ImmutableList.of(ENTITY_NAME), + "textField", + null, + 10) .size(), 0); } @@ -219,7 +320,13 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { public void testElasticSearchServiceFulltext() throws Exception { SearchResult searchResult = elasticSearchService.search( - List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); + 
opContext.withSearchFlags(flags -> flags.setFulltext(true)), + List.of(ENTITY_NAME), + "test", + null, + null, + 0, + 10); assertEquals(searchResult.getNumEntities().intValue(), 0); Urn urn = new TestEntityUrn("test", "urn1", "VALUE_1"); @@ -234,14 +341,27 @@ public void testElasticSearchServiceFulltext() throws Exception { searchResult = elasticSearchService.search( - List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); + opContext.withSearchFlags(flags -> flags.setFulltext(true)), + List.of(ENTITY_NAME), + "test", + null, + null, + 0, + 10); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); - assertEquals(elasticSearchService.docCount(ENTITY_NAME), 1); + assertEquals( + elasticSearchService.docCount( + opContext.withSearchFlags(flags -> flags.setFulltext(false)), ENTITY_NAME), + 1); assertEquals( elasticSearchService.aggregateByValue( - ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), + opContext.withSearchFlags(flags -> flags.setFulltext(false)), + ImmutableList.of(ENTITY_NAME), + "textFieldOverride", + null, + 10), ImmutableMap.of("textFieldOverride", 1L)); Urn urn2 = new TestEntityUrn("test2", "urn2", "VALUE_2"); @@ -255,14 +375,27 @@ public void testElasticSearchServiceFulltext() throws Exception { searchResult = elasticSearchService.search( - List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(true)); + opContext.withSearchFlags(flags -> flags.setFulltext(true)), + List.of(ENTITY_NAME), + "test2", + null, + null, + 0, + 10); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn2); - assertEquals(elasticSearchService.docCount(ENTITY_NAME), 2); + assertEquals( + elasticSearchService.docCount( + opContext.withSearchFlags(flags -> flags.setFulltext(false)), ENTITY_NAME), + 2); assertEquals( elasticSearchService.aggregateByValue( - 
ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), + opContext.withSearchFlags(flags -> flags.setFulltext(false)), + ImmutableList.of(ENTITY_NAME), + "textFieldOverride", + null, + 10), ImmutableMap.of("textFieldOverride", 1L, "textFieldOverride2", 1L)); elasticSearchService.deleteDocument(ENTITY_NAME, urn.toString()); @@ -270,13 +403,27 @@ public void testElasticSearchServiceFulltext() throws Exception { syncAfterWrite(getBulkProcessor()); searchResult = elasticSearchService.search( - List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(true)); + opContext.withSearchFlags(flags -> flags.setFulltext(true)), + List.of(ENTITY_NAME), + "test2", + null, + null, + 0, + 10); assertEquals(searchResult.getNumEntities().intValue(), 0); - assertEquals(elasticSearchService.docCount(ENTITY_NAME), 0); + assertEquals( + elasticSearchService.docCount( + opContext.withSearchFlags(flags -> flags.setFulltext(false)), ENTITY_NAME), + 0); assertEquals( elasticSearchService - .aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10) + .aggregateByValue( + opContext.withSearchFlags(flags -> flags.setFulltext(false)), + ImmutableList.of(ENTITY_NAME), + "textField", + null, + 10) .size(), 0); } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/cache/CacheableSearcherTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/cache/CacheableSearcherTest.java index 175c48e198185..fc60119f77512 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/cache/CacheableSearcherTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/cache/CacheableSearcherTest.java @@ -1,17 +1,21 @@ package com.linkedin.metadata.search.cache; +import static org.mockito.Mockito.mock; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertTrue; import com.google.common.collect.Streams; import com.linkedin.common.urn.TestEntityUrn; import com.linkedin.common.urn.Urn; +import 
com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.AggregationMetadataArray; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchResultMetadata; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.List; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -26,42 +30,44 @@ public class CacheableSearcherTest { @Test public void testCacheableSearcherWhenEmpty() { + OperationContext opContext = + TestOperationContexts.systemContextNoSearchAuthorization(mock(EntityRegistry.class)); CacheableSearcher emptySearcher = new CacheableSearcher<>( cacheManager.getCache("emptySearcher"), 10, this::getEmptySearchResult, CacheableSearcher.QueryPagination::getFrom, - null, true); - assertTrue(emptySearcher.getSearchResults(0, 0).getEntities().isEmpty()); - assertTrue(emptySearcher.getSearchResults(0, 10).getEntities().isEmpty()); - assertTrue(emptySearcher.getSearchResults(5, 10).getEntities().isEmpty()); + assertTrue(emptySearcher.getSearchResults(opContext, 0, 0).getEntities().isEmpty()); + assertTrue(emptySearcher.getSearchResults(opContext, 0, 10).getEntities().isEmpty()); + assertTrue(emptySearcher.getSearchResults(opContext, 5, 10).getEntities().isEmpty()); } @Test public void testCacheableSearcherWithFixedNumResults() { + OperationContext opContext = + TestOperationContexts.systemContextNoSearchAuthorization(mock(EntityRegistry.class)); CacheableSearcher fixedBatchSearcher = new CacheableSearcher<>( cacheManager.getCache("fixedBatchSearcher"), 10, qs -> getSearchResult(qs, 10), CacheableSearcher.QueryPagination::getFrom, - null, true); - SearchResult result = fixedBatchSearcher.getSearchResults(0, 0); + SearchResult result = 
fixedBatchSearcher.getSearchResults(opContext, 0, 0); assertTrue(result.getEntities().isEmpty()); assertEquals(result.getNumEntities().intValue(), 1000); - result = fixedBatchSearcher.getSearchResults(0, 10); + result = fixedBatchSearcher.getSearchResults(opContext, 0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); assertEquals( result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); - result = fixedBatchSearcher.getSearchResults(5, 10); + result = fixedBatchSearcher.getSearchResults(opContext, 5, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); assertEquals( @@ -72,27 +78,28 @@ public void testCacheableSearcherWithFixedNumResults() { @Test public void testCacheableSearcherWithVariableNumResults() { + OperationContext opContext = + TestOperationContexts.systemContextNoSearchAuthorization(mock(EntityRegistry.class)); CacheableSearcher variableBatchSearcher = new CacheableSearcher<>( cacheManager.getCache("variableBatchSearcher"), 10, qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, - null, true); - SearchResult result = variableBatchSearcher.getSearchResults(0, 0); + SearchResult result = variableBatchSearcher.getSearchResults(opContext, 0, 0); assertTrue(result.getEntities().isEmpty()); assertEquals(result.getNumEntities().intValue(), 1000); - result = variableBatchSearcher.getSearchResults(0, 10); + result = variableBatchSearcher.getSearchResults(opContext, 0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); assertEquals( result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); - result = variableBatchSearcher.getSearchResults(5, 10); + result = variableBatchSearcher.getSearchResults(opContext, 5, 10); 
assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); assertEquals( @@ -100,7 +107,7 @@ public void testCacheableSearcherWithVariableNumResults() { Streams.concat(getUrns(5, 10).stream(), getUrns(0, 5).stream()) .collect(Collectors.toList())); - result = variableBatchSearcher.getSearchResults(5, 100); + result = variableBatchSearcher.getSearchResults(opContext, 5, 100); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 100); assertEquals( @@ -116,17 +123,18 @@ public void testCacheableSearcherWithVariableNumResults() { @Test public void testCacheableSearcherEnabled() { + OperationContext opContext = + TestOperationContexts.systemContextNoSearchAuthorization(mock(EntityRegistry.class)); // Verify cache is not interacted with when cache disabled - Cache mockCache = Mockito.mock(Cache.class); + Cache mockCache = mock(Cache.class); CacheableSearcher cacheDisabled = new CacheableSearcher<>( mockCache, 10, qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, - null, false); - SearchResult result = cacheDisabled.getSearchResults(0, 10); + SearchResult result = cacheDisabled.getSearchResults(opContext, 0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); assertEquals( @@ -142,9 +150,10 @@ public void testCacheableSearcherEnabled() { 10, qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, - new SearchFlags().setSkipCache(true), true); - result = skipCache.getSearchResults(0, 10); + result = + skipCache.getSearchResults( + opContext.withSearchFlags(flags -> flags.setSkipCache(true)), 0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); assertEquals( @@ -161,9 +170,9 @@ public void testCacheableSearcherEnabled() { 10, qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), 
CacheableSearcher.QueryPagination::getFrom, - null, true); - result = nullFlags.getSearchResults(0, 10); + result = + nullFlags.getSearchResults(opContext.withSearchFlags(flags -> new SearchFlags()), 0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); assertEquals( @@ -180,9 +189,10 @@ public void testCacheableSearcherEnabled() { 10, qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, - new SearchFlags().setSkipCache(false), true); - result = useCache.getSearchResults(0, 10); + result = + useCache.getSearchResults( + opContext.withSearchFlags(flags -> flags.setSkipCache(false)), 0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); assertEquals( diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageDataFixtureElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageDataFixtureElasticSearchTest.java index 1fed3380a342d..4c9c69a52debd 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageDataFixtureElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageDataFixtureElasticSearchTest.java @@ -1,5 +1,6 @@ package com.linkedin.metadata.search.elasticsearch; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.LineageSearchService; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.fixtures.LineageDataFixtureTestBase; @@ -27,6 +28,8 @@ public class LineageDataFixtureElasticSearchTest extends LineageDataFixtureTestB @Qualifier("searchLineageLineageSearchService") protected LineageSearchService lineageService; + @Autowired protected EntityRegistry entityRegistry; + @NotNull @Override protected LineageSearchService getLineageService() { @@ -43,4 +46,9 @@ protected 
SearchService getSearchService() { public void initTest() { AssertJUnit.assertNotNull(lineageService); } + + @Override + protected EntityRegistry getEntityRegistry() { + return entityRegistry; + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java index 4c125065deb4d..d384d8275bdd7 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java @@ -1,11 +1,11 @@ package com.linkedin.metadata.search.fixtures; import static com.linkedin.metadata.Constants.*; -import static io.datahubproject.test.search.SearchTestUtils.searchAcrossCustomEntities; import static io.datahubproject.test.search.SearchTestUtils.searchAcrossEntities; import static org.testng.Assert.*; import static org.testng.AssertJUnit.assertNotNull; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringArray; @@ -21,6 +21,9 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchService; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.test.metadata.context.TestOperationContexts; +import io.datahubproject.test.search.SearchTestUtils; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; @@ -48,6 +51,12 @@ public abstract class GoldenTestBase extends AbstractTestNGSpringContextTests { @Nonnull protected abstract SearchService getSearchService(); + @Nonnull + protected OperationContext getOperationContext() { + return TestOperationContexts.userContextNoSearchAuthorization( + getEntityRegistry(), Authorizer.EMPTY, TestOperationContexts.TEST_USER_AUTH); + } + @Test 
public void testNameMatchPetProfiles() { /* @@ -56,8 +65,11 @@ public void testNameMatchPetProfiles() { assertNotNull(getSearchService()); assertNotNull(getEntityRegistry()); SearchResult searchResult = - searchAcrossCustomEntities( - getSearchService(), "pet profiles", SEARCHABLE_LONGTAIL_ENTITIES); + searchAcrossEntities( + getOperationContext(), + getSearchService(), + SEARCHABLE_LONGTAIL_ENTITIES, + "pet profiles"); assertTrue(searchResult.getEntities().size() >= 2); Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); @@ -73,7 +85,8 @@ public void testNameMatchPetProfile() { */ assertNotNull(getSearchService()); SearchResult searchResult = - searchAcrossEntities(getSearchService(), "pet profile", SEARCHABLE_LONGTAIL_ENTITIES); + searchAcrossEntities( + getOperationContext(), getSearchService(), SEARCHABLE_LONGTAIL_ENTITIES, "pet profile"); assertTrue(searchResult.getEntities().size() >= 2); Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); @@ -90,7 +103,8 @@ public void testGlossaryTerms() { */ assertNotNull(getSearchService()); SearchResult searchResult = - searchAcrossEntities(getSearchService(), "ReturnRate", SEARCHABLE_LONGTAIL_ENTITIES); + searchAcrossEntities( + getOperationContext(), getSearchService(), SEARCHABLE_LONGTAIL_ENTITIES, "ReturnRate"); SearchEntityArray entities = searchResult.getEntities(); assertTrue(searchResult.getEntities().size() >= 4); MatchedFieldArray firstResultMatchedFields = entities.get(0).getMatchedFields(); @@ -113,7 +127,10 @@ public void testNameMatchPartiallyQualified() { assertNotNull(getSearchService()); SearchResult searchResult = searchAcrossEntities( - getSearchService(), "analytics.pet_details", SEARCHABLE_LONGTAIL_ENTITIES); + getOperationContext(), + getSearchService(), + SEARCHABLE_LONGTAIL_ENTITIES, + "analytics.pet_details"); 
assertTrue(searchResult.getEntities().size() >= 2); Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); @@ -133,7 +150,10 @@ public void testNameMatchCollaborativeActionitems() { assertNotNull(getSearchService()); SearchResult searchResult = searchAcrossEntities( - getSearchService(), "collaborative actionitems", SEARCHABLE_LONGTAIL_ENTITIES); + getOperationContext(), + getSearchService(), + SEARCHABLE_LONGTAIL_ENTITIES, + "collaborative actionitems"); assertTrue(searchResult.getEntities().size() >= 2); Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); @@ -158,7 +178,11 @@ public void testNameMatchCustomerOrders() { */ assertNotNull(getSearchService()); SearchResult searchResult = - searchAcrossEntities(getSearchService(), "customer orders", SEARCHABLE_LONGTAIL_ENTITIES); + searchAcrossEntities( + getOperationContext(), + getSearchService(), + SEARCHABLE_LONGTAIL_ENTITIES, + "customer orders"); assertTrue(searchResult.getEntities().size() >= 2); Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); @@ -194,12 +218,13 @@ public void testFilterOnCountField() { .setValue("") .setValues(new StringArray(ImmutableList.of("68")))))))); SearchResult searchResult = - searchAcrossEntities( + SearchTestUtils.facetAcrossEntities( + getOperationContext(), getSearchService(), - "*", SEARCHABLE_LONGTAIL_ENTITIES, - filter, - Collections.singletonList(DATASET_ENTITY_NAME)); + "*", + Collections.singletonList(DATASET_ENTITY_NAME), + filter); assertFalse(searchResult.getEntities().isEmpty()); Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); assertEquals( diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java index 
59942f76744da..6950f62d45263 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java @@ -5,12 +5,16 @@ import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNotNull; +import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.metadata.search.LineageSearchService; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchService; import com.linkedin.util.Pair; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.net.URISyntaxException; import java.util.stream.Stream; import javax.annotation.Nonnull; @@ -25,15 +29,27 @@ public abstract class LineageDataFixtureTestBase extends AbstractTestNGSpringCon @Nonnull protected abstract SearchService getSearchService(); + @Nonnull + protected abstract EntityRegistry getEntityRegistry(); + + @Nonnull + protected OperationContext getOperationContext() { + return TestOperationContexts.userContextNoSearchAuthorization( + getEntityRegistry(), Authorizer.EMPTY, TestOperationContexts.TEST_USER_AUTH); + } + @Test public void testFixtureInitialization() { assertNotNull(getSearchService()); - SearchResult noResult = searchAcrossEntities(getSearchService(), "no results"); + SearchResult noResult = + searchAcrossEntities(getOperationContext(), getSearchService(), "no results"); assertEquals(noResult.getEntities().size(), 0); SearchResult result = searchAcrossEntities( - getSearchService(), "e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8"); + getOperationContext(), + getSearchService(), + "e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8"); 
assertEquals(result.getEntities().size(), 1); assertEquals( @@ -41,7 +57,8 @@ public void testFixtureInitialization() { "urn:li:dataset:(urn:li:dataPlatform:9cf8c96,e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8,PROD)"); LineageSearchResult lineageResult = - lineage(getLineageService(), result.getEntities().get(0).getEntity(), 1); + lineage( + getOperationContext(), getLineageService(), result.getEntities().get(0).getEntity(), 1); assertEquals(lineageResult.getEntities().size(), 10); } @@ -60,7 +77,11 @@ public void testDatasetLineage() throws URISyntaxException { hopsExpectedResultsStream.forEach( hopsExpectedResults -> { LineageSearchResult lineageResult = - lineage(getLineageService(), testUrn, hopsExpectedResults.getFirst()); + lineage( + getOperationContext(), + getLineageService(), + testUrn, + hopsExpectedResults.getFirst()); assertEquals(lineageResult.getEntities().size(), hopsExpectedResults.getSecond()); }); } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java index 4742115b16e1b..deda14c2216b1 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java @@ -14,6 +14,7 @@ import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.linkedin.common.urn.Urn; @@ -28,7 +29,6 @@ import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.query.SearchFlags; import 
com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -45,6 +45,8 @@ import com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig; import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.r2.RemoteInvocationException; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -86,6 +88,12 @@ public abstract class SampleDataFixtureTestBase extends AbstractTestNGSpringCont @Nonnull protected abstract RestHighLevelClient getSearchClient(); + @Nonnull + protected OperationContext getOperationContext() { + return TestOperationContexts.userContextNoSearchAuthorization( + getEntityRegistry(), Authorizer.EMPTY, AUTHENTICATION); + } + @Test public void testSearchFieldConfig() throws IOException { /* @@ -272,10 +280,12 @@ public void testDatasetHasTags() throws IOException { @Test public void testFixtureInitialization() { assertNotNull(getSearchService()); - SearchResult noResult = searchAcrossEntities(getSearchService(), "no results"); + SearchResult noResult = + searchAcrossEntities(getOperationContext(), getSearchService(), "no results"); assertEquals(0, noResult.getEntities().size()); - final SearchResult result = searchAcrossEntities(getSearchService(), "test"); + final SearchResult result = + searchAcrossEntities(getOperationContext(), getSearchService(), "test"); Map expectedTypes = Map.of( @@ -333,7 +343,8 @@ public void testDataPlatform() { expected.forEach( (key, value) -> { - SearchResult result = searchAcrossEntities(getSearchService(), key); + SearchResult result = + searchAcrossEntities(getOperationContext(), getSearchService(), key); assertEquals( result.getEntities().size(), value.intValue(), @@ -354,13 +365,17 @@ public void testUrn() { 
.forEach( query -> assertTrue( - searchAcrossEntities(getSearchService(), query).getEntities().size() >= 1, + searchAcrossEntities(getOperationContext(), getSearchService(), query) + .getEntities() + .size() + >= 1, String.format("Unexpected >1 urn result for `%s`", query))); } @Test public void testExactTable() { - SearchResult results = searchAcrossEntities(getSearchService(), "stg_customers"); + SearchResult results = + searchAcrossEntities(getOperationContext(), getSearchService(), "stg_customers"); assertEquals( results.getEntities().size(), 1, "Unexpected single urn result for `stg_customers`"); assertEquals( @@ -380,7 +395,8 @@ public void testStemming() { testSet -> { Integer expectedResults = null; for (String testQuery : testSet) { - SearchResult results = searchAcrossEntities(getSearchService(), testQuery); + SearchResult results = + searchAcrossEntities(getOperationContext(), getSearchService(), testQuery); assertTrue( results.hasEntities() && !results.getEntities().isEmpty(), @@ -402,7 +418,7 @@ public void testStemmingOverride() throws IOException { Set results = testSet.stream() - .map(test -> searchAcrossEntities(getSearchService(), test)) + .map(test -> searchAcrossEntities(getOperationContext(), getSearchService(), test)) .collect(Collectors.toSet()); results.forEach( @@ -456,7 +472,8 @@ public void testDelimitedSynonym() throws IOException { testSet.stream() .map( q -> { - SearchResult result = searchAcrossEntities(getSearchService(), q); + SearchResult result = + searchAcrossEntities(getOperationContext(), getSearchService(), q); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), "Expected search results for: " + q); @@ -592,7 +609,8 @@ public void testUrnSynonym() throws IOException { testSet.stream() .map( query -> { - SearchResult result = searchAcrossEntities(getSearchService(), query); + SearchResult result = + searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && 
!result.getEntities().isEmpty(), "Expected search results for: " + query); @@ -824,7 +842,8 @@ public void testChartAutoComplete() throws InterruptedException, IOException { .forEach( query -> { try { - AutoCompleteResults result = autocomplete(new ChartType(getEntityClient()), query); + AutoCompleteResults result = + autocomplete(getOperationContext(), new ChartType(getEntityClient()), query); assertTrue( result.getEntities().size() == 2, String.format( @@ -853,7 +872,7 @@ public void testDatasetAutoComplete() { query -> { try { AutoCompleteResults result = - autocomplete(new DatasetType(getEntityClient()), query); + autocomplete(getOperationContext(), new DatasetType(getEntityClient()), query); assertTrue( result.getEntities().size() >= 1, String.format( @@ -879,7 +898,8 @@ public void testContainerAutoComplete() { query -> { try { AutoCompleteResults result = - autocomplete(new ContainerType(getEntityClient()), query); + autocomplete( + getOperationContext(), new ContainerType(getEntityClient()), query); assertTrue( result.getEntities().size() >= 1, String.format( @@ -898,7 +918,8 @@ public void testGroupAutoComplete() { query -> { try { AutoCompleteResults result = - autocomplete(new CorpGroupType(getEntityClient()), query); + autocomplete( + getOperationContext(), new CorpGroupType(getEntityClient()), query); assertTrue( result.getEntities().size() == 1, String.format( @@ -917,7 +938,8 @@ public void testUserAutoComplete() { query -> { try { AutoCompleteResults result = - autocomplete(new CorpUserType(getEntityClient(), null), query); + autocomplete( + getOperationContext(), new CorpUserType(getEntityClient(), null), query); assertTrue( result.getEntities().size() >= 1, String.format( @@ -953,7 +975,9 @@ public void testSmokeTestQueries() { .collect( Collectors.toMap( Map.Entry::getKey, - entry -> searchAcrossEntities(getSearchService(), entry.getKey()))); + entry -> + searchAcrossEntities( + getOperationContext(), getSearchService(), entry.getKey()))); 
results.forEach( (key, value) -> { @@ -978,7 +1002,9 @@ public void testSmokeTestQueries() { .collect( Collectors.toMap( Map.Entry::getKey, - entry -> searchStructured(getSearchService(), entry.getKey()))); + entry -> + searchStructured( + getOperationContext(), getSearchService(), entry.getKey()))); results.forEach( (key, value) -> { @@ -1033,7 +1059,8 @@ public void testUnderscore() throws IOException { @Test public void testFacets() { Set expectedFacets = Set.of("entity", "typeNames", "platform", "origin", "tags"); - SearchResult testResult = searchAcrossEntities(getSearchService(), "cypress"); + SearchResult testResult = + searchAcrossEntities(getOperationContext(), getSearchService(), "cypress"); expectedFacets.forEach( facet -> { assertTrue( @@ -1076,7 +1103,8 @@ public void testFacets() { public void testNestedAggregation() { Set expectedFacets = Set.of("platform"); SearchResult testResult = - searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + facetAcrossEntities( + getOperationContext(), getSearchService(), "cypress", List.copyOf(expectedFacets)); assertEquals(testResult.getMetadata().getAggregations().size(), 1); expectedFacets.forEach( facet -> { @@ -1093,7 +1121,8 @@ public void testNestedAggregation() { expectedFacets = Set.of("platform", "typeNames", "_entityType", "entity"); SearchResult testResult2 = - searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + facetAcrossEntities( + getOperationContext(), getSearchService(), "cypress", List.copyOf(expectedFacets)); assertEquals(testResult2.getMetadata().getAggregations().size(), 4); expectedFacets.forEach( facet -> { @@ -1140,7 +1169,8 @@ public void testNestedAggregation() { expectedFacets = Set.of("platform", "typeNames", "entity"); SearchResult testResult3 = - searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + facetAcrossEntities( + getOperationContext(), getSearchService(), "cypress", 
List.copyOf(expectedFacets)); assertEquals(testResult3.getMetadata().getAggregations().size(), 4); expectedFacets.forEach( facet -> { @@ -1170,7 +1200,8 @@ public void testNestedAggregation() { String singleNestedFacet = String.format("_entityType%sowners", AGGREGATION_SEPARATOR_CHAR); expectedFacets = Set.of(singleNestedFacet); SearchResult testResultSingleNested = - searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + facetAcrossEntities( + getOperationContext(), getSearchService(), "cypress", List.copyOf(expectedFacets)); assertEquals(testResultSingleNested.getMetadata().getAggregations().size(), 1); Map expectedNestedFacetCounts = new HashMap<>(); expectedNestedFacetCounts.put("datajob␞urn:li:corpuser:datahub", 2L); @@ -1192,7 +1223,8 @@ public void testNestedAggregation() { expectedFacets = Set.of("platform", singleNestedFacet, "typeNames", "origin"); SearchResult testResultNested = - searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + facetAcrossEntities( + getOperationContext(), getSearchService(), "cypress", List.copyOf(expectedFacets)); assertEquals(testResultNested.getMetadata().getAggregations().size(), 4); expectedFacets.forEach( facet -> { @@ -1304,7 +1336,8 @@ public void testScrollAcrossEntities() throws IOException { int totalResults = 0; String scrollId = null; do { - ScrollResult result = scroll(getSearchService(), query, batchSize, scrollId); + ScrollResult result = + scroll(getOperationContext(), getSearchService(), query, batchSize, scrollId); int numResults = result.hasEntities() ? 
result.getEntities().size() : 0; assertTrue(numResults <= batchSize); totalResults += numResults; @@ -1317,13 +1350,19 @@ public void testScrollAcrossEntities() throws IOException { @Test public void testSearchAcrossMultipleEntities() { String query = "logging_events"; - SearchResult result = search(getSearchService(), query); + SearchResult result = search(getOperationContext(), getSearchService(), query); assertEquals((int) result.getNumEntities(), 8); - result = search(getSearchService(), List.of(DATASET_ENTITY_NAME, DATA_JOB_ENTITY_NAME), query); + result = + search( + getOperationContext(), + getSearchService(), + List.of(DATASET_ENTITY_NAME, DATA_JOB_ENTITY_NAME), + query); assertEquals((int) result.getNumEntities(), 8); - result = search(getSearchService(), List.of(DATASET_ENTITY_NAME), query); + result = search(getOperationContext(), getSearchService(), List.of(DATASET_ENTITY_NAME), query); assertEquals((int) result.getNumEntities(), 4); - result = search(getSearchService(), List.of(DATA_JOB_ENTITY_NAME), query); + result = + search(getOperationContext(), getSearchService(), List.of(DATA_JOB_ENTITY_NAME), query); assertEquals((int) result.getNumEntities(), 4); } @@ -1387,7 +1426,8 @@ public void testFragmentUrns() { testSet.forEach( query -> { - SearchResult result = searchAcrossEntities(getSearchService(), query); + SearchResult result = + searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), @@ -1410,7 +1450,8 @@ public void testPlatformTest() { fieldName -> { final String query = String.format("%s:%s", fieldName, testPlatform.replaceAll(":", "\\\\:")); - SearchResult result = searchStructured(getSearchService(), query); + SearchResult result = + searchStructured(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ -1462,14 +1503,14 @@ public void 
testPlatformTest() { try { return getEntityClient() .search( + getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(fulltextFlag)), "dataset", "*", filter, null, 0, - 100, - AUTHENTICATION, - new SearchFlags().setFulltext(fulltextFlag)); + 100); } catch (RemoteInvocationException e) { throw new RuntimeException(e); } @@ -1492,7 +1533,7 @@ public void testPlatformTest() { @Test public void testStructQueryFieldMatch() { String query = STRUCTURED_QUERY_PREFIX + "name: customers"; - SearchResult result = searchAcrossEntities(getSearchService(), query); + SearchResult result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), @@ -1507,7 +1548,7 @@ public void testStructQueryFieldMatch() { @Test public void testStructQueryFieldPrefixMatch() { String query = STRUCTURED_QUERY_PREFIX + "name: customers*"; - SearchResult result = searchAcrossEntities(getSearchService(), query); + SearchResult result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), @@ -1522,7 +1563,7 @@ public void testStructQueryFieldPrefixMatch() { @Test public void testStructQueryCustomPropertiesKeyPrefix() { String query = STRUCTURED_QUERY_PREFIX + "customProperties: node_type=*"; - SearchResult result = searchAcrossEntities(getSearchService(), query); + SearchResult result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), @@ -1537,7 +1578,7 @@ public void testStructQueryCustomPropertiesKeyPrefix() { @Test public void testStructQueryCustomPropertiesMatch() { String query = STRUCTURED_QUERY_PREFIX + "customProperties: node_type=model"; - SearchResult result = searchAcrossEntities(getSearchService(), query); + SearchResult result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( 
result.hasEntities() && !result.getEntities().isEmpty(), @@ -1563,7 +1604,9 @@ public void testCustomPropertiesQuoted() { .collect( Collectors.toMap( Map.Entry::getKey, - entry -> searchAcrossEntities(getSearchService(), entry.getKey()))); + entry -> + searchAcrossEntities( + getOperationContext(), getSearchService(), entry.getKey()))); results.forEach( (key, value) -> { @@ -1581,7 +1624,7 @@ public void testCustomPropertiesQuoted() { @Test public void testStructQueryFieldPaths() { String query = STRUCTURED_QUERY_PREFIX + "fieldPaths: customer_id"; - SearchResult result = searchAcrossEntities(getSearchService(), query); + SearchResult result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), @@ -1598,7 +1641,7 @@ public void testStructQueryBoolean() { String query = STRUCTURED_QUERY_PREFIX + "editedFieldTags:urn\\:li\\:tag\\:Legacy OR tags:urn\\:li\\:tag\\:testTag"; - SearchResult result = searchAcrossEntities(getSearchService(), query); + SearchResult result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), @@ -1610,7 +1653,7 @@ public void testStructQueryBoolean() { assertEquals(result.getEntities().size(), 2); query = STRUCTURED_QUERY_PREFIX + "editedFieldTags:urn\\:li\\:tag\\:Legacy"; - result = searchAcrossEntities(getSearchService(), query); + result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), @@ -1622,7 +1665,7 @@ public void testStructQueryBoolean() { assertEquals(result.getEntities().size(), 1); query = STRUCTURED_QUERY_PREFIX + "tags:urn\\:li\\:tag\\:testTag"; - result = searchAcrossEntities(getSearchService(), query); + result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), @@ 
-1637,7 +1680,7 @@ public void testStructQueryBoolean() { @Test public void testStructQueryBrowsePaths() { String query = STRUCTURED_QUERY_PREFIX + "browsePaths:*/dbt/*"; - SearchResult result = searchAcrossEntities(getSearchService(), query); + SearchResult result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), @@ -1652,7 +1695,7 @@ public void testStructQueryBrowsePaths() { @Test public void testOr() { String query = "stg_customers | logging_events"; - SearchResult result = searchAcrossEntities(getSearchService(), query); + SearchResult result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ -1662,7 +1705,7 @@ public void testOr() { assertEquals(result.getEntities().size(), 9); query = "stg_customers"; - result = searchAcrossEntities(getSearchService(), query); + result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ -1672,7 +1715,7 @@ public void testOr() { assertEquals(result.getEntities().size(), 1); query = "logging_events"; - result = searchAcrossEntities(getSearchService(), query); + result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ -1685,7 +1728,7 @@ public void testOr() { @Test public void testNegate() { String query = "logging_events -bckp"; - SearchResult result = searchAcrossEntities(getSearchService(), query); + SearchResult result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected 
search results", query)); @@ -1695,7 +1738,7 @@ public void testNegate() { assertEquals(result.getEntities().size(), 7); query = "logging_events"; - result = searchAcrossEntities(getSearchService(), query); + result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ -1708,7 +1751,7 @@ public void testNegate() { @Test public void testPrefix() { String query = "bigquery"; - SearchResult result = searchAcrossEntities(getSearchService(), query); + SearchResult result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ -1718,7 +1761,7 @@ public void testPrefix() { assertEquals(result.getEntities().size(), 8); query = "big*"; - result = searchAcrossEntities(getSearchService(), query); + result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ -1731,7 +1774,7 @@ public void testPrefix() { @Test public void testParens() { String query = "dbt | (bigquery + covid19)"; - SearchResult result = searchAcrossEntities(getSearchService(), query); + SearchResult result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ -1741,7 +1784,7 @@ public void testParens() { assertEquals(result.getEntities().size(), 11); query = "dbt"; - result = searchAcrossEntities(getSearchService(), query); + result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ 
-1751,7 +1794,7 @@ public void testParens() { assertEquals(result.getEntities().size(), 9); query = "bigquery + covid19"; - result = searchAcrossEntities(getSearchService(), query); + result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ -1761,7 +1804,7 @@ public void testParens() { assertEquals(result.getEntities().size(), 2); query = "bigquery"; - result = searchAcrossEntities(getSearchService(), query); + result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ -1771,7 +1814,7 @@ public void testParens() { assertEquals(result.getEntities().size(), 8); query = "covid19"; - result = searchAcrossEntities(getSearchService(), query); + result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ -1784,7 +1827,7 @@ public void testParens() { @Test public void testGram() { String query = "jaffle shop customers"; - SearchResult result = searchAcrossEntities(getSearchService(), query); + SearchResult result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ -1795,7 +1838,7 @@ public void testGram() { "Expected exact match in 1st position"); query = "shop customers source"; - result = searchAcrossEntities(getSearchService(), query); + result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ -1806,7 +1849,7 @@ public void testGram() { 
"Expected ngram match in 1st position"); query = "jaffle shop stg customers"; - result = searchAcrossEntities(getSearchService(), query); + result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ -1817,7 +1860,7 @@ public void testGram() { "Expected ngram match in 1st position"); query = "jaffle shop transformers customers"; - result = searchAcrossEntities(getSearchService(), query); + result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ -1828,7 +1871,7 @@ public void testGram() { "Expected ngram match in 1st position"); query = "shop raw customers"; - result = searchAcrossEntities(getSearchService(), query); + result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ -1842,7 +1885,7 @@ public void testGram() { @Test public void testPrefixVsExact() { String query = "\"customers\""; - SearchResult result = searchAcrossEntities(getSearchService(), query); + SearchResult result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), @@ -1865,7 +1908,7 @@ public void testPrefixVsExactCaseSensitivity() { List insensitiveExactMatches = List.of("testExactMatchCase", "testexactmatchcase", "TESTEXACTMATCHCASE"); for (String query : insensitiveExactMatches) { - SearchResult result = searchAcrossEntities(getSearchService(), query); + SearchResult result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), @@ -1885,7 +1928,7 @@ public void 
testPrefixVsExactCaseSensitivity() { @Test public void testColumnExactMatch() { String query = "unit_data"; - SearchResult result = searchAcrossEntities(getSearchService(), query); + SearchResult result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ -1902,7 +1945,7 @@ public void testColumnExactMatch() { "Expected table name exact match first"); query = "special_column_only_present_here_info"; - result = searchAcrossEntities(getSearchService(), query); + result = searchAcrossEntities(getOperationContext(), getSearchService(), query); assertTrue( result.hasEntities() && !result.getEntities().isEmpty(), String.format("%s - Expected search results", query)); @@ -1927,13 +1970,14 @@ public void testSortOrdering() { SearchResult result = getSearchService() .searchAcrossEntities( + getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true).setSkipCache(true)), SEARCHABLE_ENTITIES, query, null, criterion, 0, 100, - new SearchFlags().setFulltext(true).setSkipCache(true), null); assertTrue( result.getEntities().size() > 2, @@ -1957,11 +2001,11 @@ public void testFilterOnHasValuesField() { .setValues(new StringArray(ImmutableList.of("true")))))))); SearchResult searchResult = searchAcrossEntities( + getOperationContext(), getSearchService(), + Collections.singletonList(DATASET_ENTITY_NAME), "*", - SEARCHABLE_ENTITIES, - filter, - Collections.singletonList(DATASET_ENTITY_NAME)); + filter); assertEquals(searchResult.getEntities().size(), 8); } @@ -1982,11 +2026,11 @@ public void testFilterOnNumValuesField() { .setValues(new StringArray(ImmutableList.of("1")))))))); SearchResult searchResult = searchAcrossEntities( + getOperationContext(), getSearchService(), + Collections.singletonList(DATA_JOB_ENTITY_NAME), "*", - SEARCHABLE_ENTITIES, - filter, - Collections.singletonList(DATA_JOB_ENTITY_NAME)); + filter); 
assertEquals(searchResult.getEntities().size(), 4); } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageDataFixtureOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageDataFixtureOpenSearchTest.java index cc17e3287544c..c67920a8ec115 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageDataFixtureOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageDataFixtureOpenSearchTest.java @@ -1,5 +1,6 @@ package com.linkedin.metadata.search.opensearch; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.LineageSearchService; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.fixtures.LineageDataFixtureTestBase; @@ -27,6 +28,10 @@ public class LineageDataFixtureOpenSearchTest extends LineageDataFixtureTestBase @Qualifier("searchLineageLineageSearchService") protected LineageSearchService lineageService; + @Autowired + @Qualifier("entityRegistry") + protected EntityRegistry entityRegistry; + @NotNull @Override protected LineageSearchService getLineageService() { @@ -43,4 +48,9 @@ protected SearchService getSearchService() { public void initTest() { AssertJUnit.assertNotNull(lineageService); } + + @Override + protected EntityRegistry getEntityRegistry() { + return entityRegistry; + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java index b43d1556a6882..264aa280cac90 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java @@ -2,6 +2,7 @@ import static com.linkedin.metadata.Constants.*; import static com.linkedin.metadata.utils.SearchUtil.AGGREGATION_SEPARATOR_CHAR; +import static 
com.linkedin.metadata.utils.SearchUtil.ES_INDEX_FIELD; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -11,6 +12,7 @@ import static org.testng.Assert.assertTrue; import static org.testng.Assert.fail; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.data.template.LongMap; import com.linkedin.data.template.StringArray; @@ -29,16 +31,20 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchResultMetadata; +import com.linkedin.metadata.search.elasticsearch.ElasticSearchService; import com.linkedin.metadata.search.elasticsearch.query.ESSearchDAO; import com.linkedin.metadata.search.opensearch.SearchDAOOpenSearchTest; import com.linkedin.metadata.utils.SearchUtil; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.r2.RemoteInvocationException; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; +import lombok.Getter; import org.opensearch.action.explain.ExplainResponse; import org.opensearch.client.RestHighLevelClient; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; @@ -57,11 +63,18 @@ public abstract class SearchDAOTestBase extends AbstractTestNGSpringContextTests protected AspectRetriever aspectRetriever; + @Getter protected OperationContext operationContext; + @BeforeClass public void setup() throws RemoteInvocationException, URISyntaxException { aspectRetriever = mock(AspectRetriever.class); when(aspectRetriever.getEntityRegistry()).thenReturn(getEntityRegistry()); when(aspectRetriever.getLatestAspectObjects(any(), 
any())).thenReturn(Map.of()); + operationContext = + TestOperationContexts.userContextNoSearchAuthorization( + aspectRetriever.getEntityRegistry(), + Authorizer.EMPTY, + TestOperationContexts.TEST_USER_AUTH); } @Test @@ -124,7 +137,7 @@ public void testTransformFilterForEntitiesWithChanges() { .setValues(new StringArray(ImmutableList.of("smpldat_datasetindex_v2"))) .setNegated(false) .setCondition(Condition.EQUAL) - .setField("_index"); + .setField(ES_INDEX_FIELD); Filter expectedNewFilter = new Filter() @@ -168,7 +181,7 @@ public void testTransformFilterForEntitiesWithUnderscore() { .setValues(new StringArray(ImmutableList.of("smpldat_datajobindex_v2"))) .setNegated(false) .setCondition(Condition.EQUAL) - .setField("_index"); + .setField(ES_INDEX_FIELD); Filter expectedNewFilter = new Filter() @@ -220,7 +233,7 @@ public void testTransformFilterForEntitiesWithSomeChanges() { .setValues(new StringArray(ImmutableList.of("smpldat_datasetindex_v2"))) .setNegated(false) .setCondition(Condition.EQUAL) - .setField("_index"); + .setField(ES_INDEX_FIELD); Filter expectedNewFilter = new Filter() @@ -462,6 +475,8 @@ public void testExplain() { .setAspectRetriever(aspectRetriever); ExplainResponse explainResponse = searchDAO.explain( + getOperationContext() + .withSearchFlags(flags -> ElasticSearchService.DEFAULT_SERVICE_SEARCH_FLAGS), "*", "urn:li:dataset:(urn:li:dataPlatform:bigquery,bigquery-public-data.covid19_geotab_mobility_impact." 
+ "ca_border_wait_times,PROD)", @@ -470,7 +485,6 @@ public void testExplain() { null, null, null, - null, 10, null); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AutocompleteRequestHandlerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AutocompleteRequestHandlerTest.java index bb37fb3f3b206..6ec4f4e589f35 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AutocompleteRequestHandlerTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AutocompleteRequestHandlerTest.java @@ -6,7 +6,10 @@ import com.linkedin.metadata.TestEntitySpecBuilder; import com.linkedin.metadata.aspect.AspectRetriever; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.elasticsearch.query.request.AutocompleteRequestHandler; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.List; import java.util.Map; import org.opensearch.action.search.SearchRequest; @@ -22,11 +25,14 @@ public class AutocompleteRequestHandlerTest { private AutocompleteRequestHandler handler = AutocompleteRequestHandler.getBuilder( TestEntitySpecBuilder.getSpec(), mock(AspectRetriever.class)); + private OperationContext mockOpContext = + TestOperationContexts.systemContextNoSearchAuthorization(mock(EntityRegistry.class)); @Test public void testDefaultAutocompleteRequest() { // When field is null - SearchRequest autocompleteRequest = handler.getSearchRequest("input", null, null, 10); + SearchRequest autocompleteRequest = + handler.getSearchRequest(mockOpContext, "input", null, null, 10); SearchSourceBuilder sourceBuilder = autocompleteRequest.source(); assertEquals(sourceBuilder.size(), 10); BoolQueryBuilder query = (BoolQueryBuilder) sourceBuilder.query(); @@ -64,7 +70,8 @@ public void testDefaultAutocompleteRequest() { @Test public void 
testAutocompleteRequestWithField() { // When field is null - SearchRequest autocompleteRequest = handler.getSearchRequest("input", "field", null, 10); + SearchRequest autocompleteRequest = + handler.getSearchRequest(mockOpContext, "input", "field", null, 10); SearchSourceBuilder sourceBuilder = autocompleteRequest.source(); assertEquals(sourceBuilder.size(), 10); BoolQueryBuilder query = (BoolQueryBuilder) sourceBuilder.query(); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java index 14cc9e47913c1..be128d7855f39 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java @@ -12,7 +12,6 @@ import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.config.search.WordGramConfiguration; import com.linkedin.metadata.models.EntitySpec; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -20,6 +19,7 @@ import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.elasticsearch.query.request.SearchRequestHandler; +import io.datahubproject.metadata.context.OperationContext; import io.datahubproject.test.search.config.SearchCommonTestConfiguration; import java.util.ArrayList; import java.util.Collection; @@ -51,6 +51,8 @@ public class SearchRequestHandlerTest extends AbstractTestNGSpringContextTests { @Autowired private AspectRetriever aspectRetriever; + @Autowired private OperationContext operationContext; + public static SearchConfiguration testQueryConfig; 
static { @@ -106,12 +108,13 @@ public void testSearchRequestHandlerHighlightingTurnedOff() { TestEntitySpecBuilder.getSpec(), testQueryConfig, null, aspectRetriever); SearchRequest searchRequest = requestHandler.getSearchRequest( + operationContext.withSearchFlags( + flags -> flags.setFulltext(false).setSkipHighlighting(true)), "testQuery", null, null, 0, 10, - new SearchFlags().setFulltext(false).setSkipHighlighting(true), null); SearchSourceBuilder sourceBuilder = searchRequest.source(); assertEquals(sourceBuilder.from(), 0); @@ -146,7 +149,13 @@ public void testSearchRequestHandler() { TestEntitySpecBuilder.getSpec(), testQueryConfig, null, aspectRetriever); SearchRequest searchRequest = requestHandler.getSearchRequest( - "testQuery", null, null, 0, 10, new SearchFlags().setFulltext(false), null); + operationContext.withSearchFlags(flags -> flags.setFulltext(false)), + "testQuery", + null, + null, + 0, + 10, + null); SearchSourceBuilder sourceBuilder = searchRequest.source(); assertEquals(sourceBuilder.from(), 0); assertEquals(sourceBuilder.size(), 10); @@ -204,12 +213,12 @@ public void testAggregationsInSearch() { String.format("_entityType%stextFieldOverride", AGGREGATION_SEPARATOR_CHAR); SearchRequest searchRequest = requestHandler.getSearchRequest( + operationContext.withSearchFlags(flags -> flags.setFulltext(true)), "*", null, null, 0, 10, - new SearchFlags().setFulltext(true), List.of( "textFieldOverride", "_entityType", @@ -229,12 +238,12 @@ public void testAggregationsInSearch() { .size(testQueryConfig.getMaxTermBucketSize()); AggregationBuilder expectedEntityTypeAggregationBuilder = AggregationBuilders.terms("_entityType") - .field("_index") + .field(ES_INDEX_FIELD) .size(testQueryConfig.getMaxTermBucketSize()) .minDocCount(0); AggregationBuilder expectedNestedAggregationBuilder = AggregationBuilders.terms(nestedAggString) - .field("_index") + .field(ES_INDEX_FIELD) .size(testQueryConfig.getMaxTermBucketSize()) .minDocCount(0) .subAggregation( @@ -305,6 
+314,7 @@ private BoolQueryBuilder constructFilterQuery( (BoolQueryBuilder) requestHandler .getSearchRequest( + operationContext.withSearchFlags(flags -> flags.setFulltext(false)), "testQuery", filterWithoutRemovedCondition, null, @@ -312,7 +322,6 @@ private BoolQueryBuilder constructFilterQuery( null, "5m", 10, - new SearchFlags().setFulltext(false), null) .source() .query(); @@ -321,12 +330,12 @@ private BoolQueryBuilder constructFilterQuery( (BoolQueryBuilder) requestHandler .getSearchRequest( + operationContext.withSearchFlags(flags -> flags.setFulltext(false)), "testQuery", filterWithoutRemovedCondition, null, 0, 10, - new SearchFlags().setFulltext(false), null) .source() .query(); @@ -380,6 +389,7 @@ private BoolQueryBuilder constructRemovedQuery( (BoolQueryBuilder) requestHandler .getSearchRequest( + operationContext.withSearchFlags(flags -> flags.setFulltext(false)), "testQuery", filterWithRemovedCondition, null, @@ -387,7 +397,6 @@ private BoolQueryBuilder constructRemovedQuery( null, "5m", 10, - new SearchFlags().setFulltext(false), null) .source() .query(); @@ -396,12 +405,12 @@ private BoolQueryBuilder constructRemovedQuery( (BoolQueryBuilder) requestHandler .getSearchRequest( + operationContext.withSearchFlags(flags -> flags.setFulltext(false)), "testQuery", filterWithRemovedCondition, null, 0, 10, - new SearchFlags().setFulltext(false), null) .source() .query(); @@ -621,7 +630,11 @@ public void testBrowsePathQueryFilter() { filter.setOr(conjunctiveCriterionArray); BoolQueryBuilder test = - SearchRequestHandler.getFilterQuery(filter, new HashMap<>(), aspectRetriever); + SearchRequestHandler.getFilterQuery( + operationContext.withSearchFlags(flags -> flags.setFulltext(false)), + filter, + new HashMap<>(), + aspectRetriever); assertEquals(test.should().size(), 1); @@ -649,7 +662,14 @@ private BoolQueryBuilder getQuery(final Criterion filterCriterion) { return (BoolQueryBuilder) requestHandler - .getSearchRequest("", filter, null, 0, 10, new 
SearchFlags().setFulltext(false), null) + .getSearchRequest( + operationContext.withSearchFlags(flags -> flags.setFulltext(false)), + "", + filter, + null, + 0, + 10, + null) .source() .query(); } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESAccessControlUtilTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESAccessControlUtilTest.java new file mode 100644 index 0000000000000..4d97bab1e4214 --- /dev/null +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESAccessControlUtilTest.java @@ -0,0 +1,646 @@ +package com.linkedin.metadata.search.utils; + +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertEquals; + +import com.datahub.authentication.Actor; +import com.datahub.authentication.ActorType; +import com.datahub.authentication.Authentication; +import com.datahub.authorization.config.SearchAuthorizationConfiguration; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.StringArray; +import com.linkedin.metadata.aspect.hooks.OwnerTypeMap; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.entity.TestEntityRegistry; +import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; +import com.linkedin.policy.DataHubActorFilter; +import com.linkedin.policy.DataHubPolicyInfo; +import com.linkedin.policy.DataHubResourceFilter; +import com.linkedin.policy.PolicyMatchCondition; +import com.linkedin.policy.PolicyMatchCriterion; +import com.linkedin.policy.PolicyMatchCriterionArray; +import com.linkedin.policy.PolicyMatchFilter; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.context.OperationContextConfig; +import java.util.List; +import 
java.util.Optional; +import java.util.Set; +import org.opensearch.index.query.QueryBuilders; +import org.testng.annotations.Test; + +public class ESAccessControlUtilTest { + private static final Authentication SYSTEM_AUTH = + new Authentication(new Actor(ActorType.USER, "SYSTEM"), ""); + private static final Urn TEST_GROUP_A = UrnUtils.getUrn("urn:li:corpGroup:a"); + private static final Urn TEST_GROUP_B = UrnUtils.getUrn("urn:li:corpGroup:b"); + private static final Urn TEST_GROUP_C = UrnUtils.getUrn("urn:li:corpGroup:c"); + private static final Urn TEST_USER_A = UrnUtils.getUrn("urn:li:corpUser:a"); + private static final Urn TEST_USER_B = UrnUtils.getUrn("urn:li:corpUser:b"); + private static final Urn TECH_OWNER = + UrnUtils.getUrn("urn:li:ownershipType:__system__technical_owner"); + private static final Urn BUS_OWNER = + UrnUtils.getUrn("urn:li:ownershipType:__system__business_owner"); + private static final Authentication USER_AUTH = + new Authentication(new Actor(ActorType.USER, TEST_USER_A.getId()), ""); + private static final OperationContext ENABLED_CONTEXT = + OperationContext.asSystem( + OperationContextConfig.builder() + .allowSystemAuthentication(true) + .searchAuthorizationConfiguration( + SearchAuthorizationConfiguration.builder().enabled(true).build()) + .build(), + new TestEntityRegistry(), + SYSTEM_AUTH, + IndexConventionImpl.NO_PREFIX); + + @Test + public void testAllUserAllGroup() { + OperationContext allUsers = + sessionWithPolicy( + Set.of( + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors(new DataHubActorFilter().setAllUsers(true)) + .setPrivileges( + new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + OperationContext allGroups = + sessionWithPolicy( + Set.of( + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors(new 
DataHubActorFilter().setAllGroups(true)) + .setPrivileges( + new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + + assertEquals( + ESAccessControlUtil.buildAccessControlFilters(allUsers), + Optional.empty(), + "Expected no ES filters for all user access without resource restrictions"); + assertEquals( + ESAccessControlUtil.buildAccessControlFilters(allGroups), + Optional.empty(), + "Expected no ES filters for all user access without resource restrictions"); + } + + @Test + public void testAllUserAllGroupEntityType() { + OperationContext resourceAllUsersPolicy = + sessionWithPolicy( + Set.of( + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors(new DataHubActorFilter().setAllUsers(true)) + .setResources( + new DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria( + new PolicyMatchCriterionArray( + new PolicyMatchCriterion() + .setField("TYPE") + .setCondition(PolicyMatchCondition.EQUALS) + .setValues(new StringArray("dataset", "chart")))))) + .setPrivileges( + new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + OperationContext resourceAllGroupsPolicy = + sessionWithPolicy( + Set.of( + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors(new DataHubActorFilter().setAllGroups(true)) + .setResources( + new DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria( + new PolicyMatchCriterionArray( + new PolicyMatchCriterion() + .setField("TYPE") + .setCondition(PolicyMatchCondition.EQUALS) + .setValues(new StringArray("dataset", "chart")))))) + .setPrivileges( + new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + + assertEquals( + ESAccessControlUtil.buildAccessControlFilters(resourceAllUsersPolicy), + Optional.of( + QueryBuilders.boolQuery() + .should( + QueryBuilders.boolQuery() + 
.filter( + QueryBuilders.termsQuery( + "_index", List.of("datasetindex_v2", "chartindex_v2")))) + .minimumShouldMatch(1)), + "Expected index filter for each entity"); + + assertEquals( + ESAccessControlUtil.buildAccessControlFilters(resourceAllGroupsPolicy), + Optional.of( + QueryBuilders.boolQuery() + .should( + QueryBuilders.boolQuery() + .filter( + QueryBuilders.termsQuery( + "_index", List.of("datasetindex_v2", "chartindex_v2")))) + .minimumShouldMatch(1)), + "Expected index filter for each entity"); + } + + @Test + public void testAllUserAllGroupUrn() { + OperationContext resourceAllUsersPolicy = + sessionWithPolicy( + Set.of( + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors(new DataHubActorFilter().setAllUsers(true)) + .setResources( + new DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria( + new PolicyMatchCriterionArray( + new PolicyMatchCriterion() + .setField("URN") + .setCondition(PolicyMatchCondition.EQUALS) + .setValues( + new StringArray( + "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.analytics.ShelterDogs,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.ecommerce.account,PROD)")))))) + .setPrivileges( + new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + OperationContext resourceAllGroupsPolicy = + sessionWithPolicy( + Set.of( + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors(new DataHubActorFilter().setAllGroups(true)) + .setResources( + new DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria( + new PolicyMatchCriterionArray( + new PolicyMatchCriterion() + .setField("URN") + .setCondition(PolicyMatchCondition.EQUALS) + .setValues( + new StringArray( + "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.analytics.ShelterDogs,PROD)", + 
"urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.ecommerce.account,PROD)")))))) + .setPrivileges( + new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + + assertEquals( + ESAccessControlUtil.buildAccessControlFilters(resourceAllUsersPolicy), + Optional.of( + QueryBuilders.boolQuery() + .should( + QueryBuilders.boolQuery() + .filter( + QueryBuilders.termsQuery( + "urn", + List.of( + "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.analytics.ShelterDogs,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.ecommerce.account,PROD)")))) + .minimumShouldMatch(1)), + "Expected filter for each urn"); + + assertEquals( + ESAccessControlUtil.buildAccessControlFilters(resourceAllGroupsPolicy), + Optional.of( + QueryBuilders.boolQuery() + .should( + QueryBuilders.boolQuery() + .filter( + QueryBuilders.termsQuery( + "urn", + List.of( + "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.analytics.ShelterDogs,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.ecommerce.account,PROD)")))) + .minimumShouldMatch(1)), + "Expected filter for each urn"); + } + + @Test + public void testAllUserAllGroupTag() { + OperationContext resourceAllUsersPolicy = + sessionWithPolicy( + Set.of( + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors(new DataHubActorFilter().setAllUsers(true)) + .setResources( + new DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria( + new PolicyMatchCriterionArray( + new PolicyMatchCriterion() + .setField("TAG") + .setCondition(PolicyMatchCondition.EQUALS) + .setValues( + new StringArray( + "urn:li:tag:pii", "urn:li:tag:prod")))))) + .setPrivileges( + new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + OperationContext resourceAllGroupsPolicy = + sessionWithPolicy( + Set.of( + new DataHubPolicyInfo() + 
.setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors(new DataHubActorFilter().setAllGroups(true)) + .setResources( + new DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria( + new PolicyMatchCriterionArray( + new PolicyMatchCriterion() + .setField("TAG") + .setCondition(PolicyMatchCondition.EQUALS) + .setValues( + new StringArray( + "urn:li:tag:pii", "urn:li:tag:prod")))))) + .setPrivileges( + new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + + assertEquals( + ESAccessControlUtil.buildAccessControlFilters(resourceAllUsersPolicy), + Optional.of( + QueryBuilders.boolQuery() + .should( + QueryBuilders.boolQuery() + .filter( + QueryBuilders.termsQuery( + "tags.keyword", List.of("urn:li:tag:pii", "urn:li:tag:prod")))) + .minimumShouldMatch(1)), + "Expected filter each tag"); + + assertEquals( + ESAccessControlUtil.buildAccessControlFilters(resourceAllGroupsPolicy), + Optional.of( + QueryBuilders.boolQuery() + .should( + QueryBuilders.boolQuery() + .filter( + QueryBuilders.termsQuery( + "tags.keyword", List.of("urn:li:tag:pii", "urn:li:tag:prod")))) + .minimumShouldMatch(1)), + "Expected filter each tag"); + } + + @Test + public void testAllUserAllGroupDomain() { + OperationContext resourceAllUsersPolicy = + sessionWithPolicy( + Set.of( + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors(new DataHubActorFilter().setAllUsers(true)) + .setResources( + new DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria( + new PolicyMatchCriterionArray( + new PolicyMatchCriterion() + .setField("DOMAIN") + .setCondition(PolicyMatchCondition.EQUALS) + .setValues( + new StringArray( + "urn:li:domain:f9229a0b-c5ad-47e7-9ff3-f4248c5cb634", + "urn:li:domain:7d64d0fa-66c3-445c-83db-3a324723daf8")))))) + .setPrivileges( + new 
StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + OperationContext resourceAllGroupsPolicy = + sessionWithPolicy( + Set.of( + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors(new DataHubActorFilter().setAllGroups(true)) + .setResources( + new DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria( + new PolicyMatchCriterionArray( + new PolicyMatchCriterion() + .setField("DOMAIN") + .setCondition(PolicyMatchCondition.EQUALS) + .setValues( + new StringArray( + "urn:li:domain:f9229a0b-c5ad-47e7-9ff3-f4248c5cb634", + "urn:li:domain:7d64d0fa-66c3-445c-83db-3a324723daf8")))))) + .setPrivileges( + new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + + assertEquals( + ESAccessControlUtil.buildAccessControlFilters(resourceAllUsersPolicy), + Optional.of( + QueryBuilders.boolQuery() + .should( + QueryBuilders.boolQuery() + .filter( + QueryBuilders.termsQuery( + "domains.keyword", + List.of( + "urn:li:domain:f9229a0b-c5ad-47e7-9ff3-f4248c5cb634", + "urn:li:domain:7d64d0fa-66c3-445c-83db-3a324723daf8")))) + .minimumShouldMatch(1)), + "Expected filter each domain"); + + assertEquals( + ESAccessControlUtil.buildAccessControlFilters(resourceAllGroupsPolicy), + Optional.of( + QueryBuilders.boolQuery() + .should( + QueryBuilders.boolQuery() + .filter( + QueryBuilders.termsQuery( + "domains.keyword", + List.of( + "urn:li:domain:f9229a0b-c5ad-47e7-9ff3-f4248c5cb634", + "urn:li:domain:7d64d0fa-66c3-445c-83db-3a324723daf8")))) + .minimumShouldMatch(1)), + "Expected filter each domain"); + } + + @Test + public void testAllUserAllGroupUnknownField() { + OperationContext resourceAllUsersPolicy = + sessionWithPolicy( + Set.of( + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors(new DataHubActorFilter().setAllUsers(true)) + .setResources( + new 
DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria( + new PolicyMatchCriterionArray( + new PolicyMatchCriterion() + .setField("UNKNOWN FIELD") + .setCondition(PolicyMatchCondition.EQUALS) + .setValues(new StringArray("dataset", "chart")))))) + .setPrivileges( + new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + OperationContext resourceAllGroupsPolicy = + sessionWithPolicy( + Set.of( + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors(new DataHubActorFilter().setAllGroups(true)) + .setResources( + new DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria( + new PolicyMatchCriterionArray( + new PolicyMatchCriterion() + .setField("UNKNOWN FIELD") + .setCondition(PolicyMatchCondition.EQUALS) + .setValues(new StringArray("dataset", "chart")))))) + .setPrivileges( + new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + + assertEquals( + ESAccessControlUtil.buildAccessControlFilters(resourceAllUsersPolicy), + Optional.of(QueryBuilders.boolQuery().mustNot(QueryBuilders.matchAllQuery())), + "Expected match-none query when an unknown field is encountered"); + + assertEquals( + ESAccessControlUtil.buildAccessControlFilters(resourceAllGroupsPolicy), + Optional.of(QueryBuilders.boolQuery().mustNot(QueryBuilders.matchAllQuery())), + "Expected match-none query when an unknown field is encountered"); + } + + @Test + public void testUserGroupOwner() { + OperationContext ownerNoGroupsNoType = + sessionWithPolicy( + Set.of( + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors(new DataHubActorFilter().setResourceOwners(true)) + .setPrivileges( + new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + assertEquals( + ESAccessControlUtil.buildAccessControlFilters(ownerNoGroupsNoType), + 
Optional.of( + QueryBuilders.boolQuery() + .should( + QueryBuilders.termsQuery( + "owners.keyword", + List.of( + TEST_USER_A.toString().toLowerCase(), + TEST_GROUP_A.toString().toLowerCase(), + TEST_GROUP_C.toString().toLowerCase()))) + .minimumShouldMatch(1)), + "Expected user filter for owners without group filter"); + + OperationContext ownerWithGroupsNoType = + sessionWithPolicy( + Set.of( + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors( + new DataHubActorFilter() + .setResourceOwners(true) + .setGroups(new UrnArray(TEST_GROUP_A))) + .setPrivileges( + new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + assertEquals( + ESAccessControlUtil.buildAccessControlFilters(ownerWithGroupsNoType), + Optional.of( + QueryBuilders.boolQuery() + .should( + QueryBuilders.termsQuery( + "owners.keyword", + List.of( + TEST_USER_A.toString().toLowerCase(), + TEST_GROUP_A.toString().toLowerCase(), + TEST_GROUP_C.toString().toLowerCase()))) + .minimumShouldMatch(1)), + "Expected user AND group filter for owners"); + } + + @Test + public void testUserGroupOwnerTypes() { + OperationContext ownerTypeBusinessNoUserNoGroup = + sessionWithPolicy( + Set.of( + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors( + new DataHubActorFilter().setResourceOwnersTypes(new UrnArray(BUS_OWNER))) + .setPrivileges( + new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + assertEquals( + ESAccessControlUtil.buildAccessControlFilters(ownerTypeBusinessNoUserNoGroup), + Optional.of( + QueryBuilders.boolQuery() + .should( + QueryBuilders.boolQuery() + .should( + QueryBuilders.termsQuery( + "ownerTypes." 
+ + OwnerTypeMap.encodeFieldName(BUS_OWNER.toString()) + + ".keyword", + List.of( + TEST_USER_A.toString().toLowerCase(), + TEST_GROUP_A.toString().toLowerCase(), + TEST_GROUP_C.toString().toLowerCase()))) + .minimumShouldMatch(1)) + .minimumShouldMatch(1)), + "Expected user filter for business owner via user or group urn"); + + OperationContext ownerTypeBusinessMultiUserNoGroup = + sessionWithPolicy( + Set.of( + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors( + new DataHubActorFilter() + .setResourceOwnersTypes(new UrnArray(BUS_OWNER)) + .setUsers(new UrnArray(List.of(TEST_USER_A, TEST_USER_B)))) + .setPrivileges( + new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + assertEquals( + ESAccessControlUtil.buildAccessControlFilters(ownerTypeBusinessMultiUserNoGroup), + Optional.of( + QueryBuilders.boolQuery() + .should( + QueryBuilders.boolQuery() + .should( + QueryBuilders.termsQuery( + "ownerTypes." 
+ + OwnerTypeMap.encodeFieldName(BUS_OWNER.toString()) + + ".keyword", + List.of( + TEST_USER_A.toString().toLowerCase(), + TEST_GROUP_A.toString().toLowerCase(), + TEST_GROUP_C.toString().toLowerCase()))) + .minimumShouldMatch(1)) + .minimumShouldMatch(1)), + "Expected user filter for `business owner` by owner user/group A urn (excluding other user/group B)"); + + OperationContext ownerWithGroupsBusTechMultiGroup = + sessionWithPolicy( + Set.of( + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors( + new DataHubActorFilter() + .setResourceOwnersTypes(new UrnArray(BUS_OWNER, TECH_OWNER)) + .setGroups(new UrnArray(TEST_GROUP_A, TEST_GROUP_B))) + .setPrivileges( + new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + assertEquals( + ESAccessControlUtil.buildAccessControlFilters(ownerWithGroupsBusTechMultiGroup), + Optional.of( + QueryBuilders.boolQuery() + .should( + QueryBuilders.boolQuery() + .should( + QueryBuilders.termsQuery( + "ownerTypes." + + OwnerTypeMap.encodeFieldName(BUS_OWNER.toString()) + + ".keyword", + List.of( + TEST_USER_A.toString().toLowerCase(), + TEST_GROUP_A.toString().toLowerCase(), + TEST_GROUP_C.toString().toLowerCase()))) + .should( + QueryBuilders.termsQuery( + "ownerTypes." 
+ + OwnerTypeMap.encodeFieldName(TECH_OWNER.toString()) + + ".keyword", + List.of( + TEST_USER_A.toString().toLowerCase(), + TEST_GROUP_A.toString().toLowerCase(), + TEST_GROUP_C.toString().toLowerCase()))) + .minimumShouldMatch(1)) + .minimumShouldMatch(1)), + "Expected filter for business owner or technical owner by group A (excluding other group B and owner privilege)"); + + OperationContext ownerWithMultiUserMultiGroupsBusTech = + sessionWithPolicy( + Set.of( + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors( + new DataHubActorFilter() + .setResourceOwnersTypes(new UrnArray(BUS_OWNER, TECH_OWNER)) + .setUsers(new UrnArray(List.of(TEST_USER_A, TEST_USER_B))) + .setGroups(new UrnArray(TEST_GROUP_A, TEST_GROUP_B))) + .setPrivileges( + new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + assertEquals( + ESAccessControlUtil.buildAccessControlFilters(ownerWithMultiUserMultiGroupsBusTech), + Optional.of( + QueryBuilders.boolQuery() + .should( + QueryBuilders.boolQuery() + .should( + QueryBuilders.termsQuery( + "ownerTypes." + + OwnerTypeMap.encodeFieldName(BUS_OWNER.toString()) + + ".keyword", + List.of( + TEST_USER_A.toString().toLowerCase(), + TEST_GROUP_A.toString().toLowerCase(), + TEST_GROUP_C.toString().toLowerCase()))) + .should( + QueryBuilders.termsQuery( + "ownerTypes." 
+ + OwnerTypeMap.encodeFieldName(TECH_OWNER.toString()) + + ".keyword", + List.of( + TEST_USER_A.toString().toLowerCase(), + TEST_GROUP_A.toString().toLowerCase(), + TEST_GROUP_C.toString().toLowerCase()))) + .minimumShouldMatch(1)) + .minimumShouldMatch(1)), + "Expected filter for business owner or technical owner by user A and group A (excluding other group B and owner privilege)"); + } + + private static OperationContext sessionWithPolicy(Set policies) { + return sessionWithPolicy(policies, List.of(TEST_GROUP_A, TEST_GROUP_C)); + } + + private static OperationContext sessionWithPolicy( + Set policies, List groups) { + Authorizer mockAuthorizer = mock(Authorizer.class); + when(mockAuthorizer.getActorPolicies(eq(UrnUtils.getUrn(USER_AUTH.getActor().toUrnStr())))) + .thenReturn(policies); + when(mockAuthorizer.getActorGroups(eq(UrnUtils.getUrn(USER_AUTH.getActor().toUrnStr())))) + .thenReturn(groups); + + return ENABLED_CONTEXT.asSession(mockAuthorizer, USER_AUTH); + } +} diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java index f4e8224254530..2b2d76fb5ec6f 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java @@ -20,7 +20,9 @@ private SearchFlags getDefaultSearchFlags() { .setSkipCache(true) .setSkipAggregates(true) .setMaxAggValues(1) - .setSkipHighlighting(true), + .setSkipHighlighting(true) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), true); } @@ -54,7 +56,9 @@ public void testApplyDefaultSearchFlags() { .setSkipCache(false) .setSkipAggregates(false) .setMaxAggValues(2) - .setSkipHighlighting(false), + .setSkipHighlighting(false) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), "not empty", defaultFlags), setConvertSchemaFieldsToDatasets( @@ -63,7 +67,9 @@ public void 
testApplyDefaultSearchFlags() { .setSkipAggregates(false) .setSkipCache(false) .setMaxAggValues(2) - .setSkipHighlighting(false), + .setSkipHighlighting(false) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), SearchUtils.convertSchemaFieldToDataset(defaultFlags)), "Expected no default values"); @@ -74,7 +80,9 @@ public void testApplyDefaultSearchFlags() { .setSkipCache(false) .setSkipAggregates(false) .setMaxAggValues(2) - .setSkipHighlighting(false), + .setSkipHighlighting(false) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), null, defaultFlags), setConvertSchemaFieldsToDatasets( @@ -83,7 +91,9 @@ public void testApplyDefaultSearchFlags() { .setSkipAggregates(false) .setSkipCache(false) .setMaxAggValues(2) - .setSkipHighlighting(true), + .setSkipHighlighting(true) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), SearchUtils.convertSchemaFieldToDataset(defaultFlags)), "Expected skip highlight due to query null query"); for (String query : Set.of("*", "")) { @@ -94,7 +104,9 @@ public void testApplyDefaultSearchFlags() { .setSkipCache(false) .setSkipAggregates(false) .setMaxAggValues(2) - .setSkipHighlighting(false), + .setSkipHighlighting(false) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), query, defaultFlags), setConvertSchemaFieldsToDatasets( @@ -103,7 +115,9 @@ public void testApplyDefaultSearchFlags() { .setSkipAggregates(false) .setSkipCache(false) .setMaxAggValues(2) - .setSkipHighlighting(true), + .setSkipHighlighting(true) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), SearchUtils.convertSchemaFieldToDataset(defaultFlags)), String.format("Expected skip highlight due to query string `%s`", query)); } @@ -117,7 +131,9 @@ public void testApplyDefaultSearchFlags() { .setSkipAggregates(true) .setSkipCache(true) .setMaxAggValues(1) - .setSkipHighlighting(true), + .setSkipHighlighting(true) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), 
SearchUtils.convertSchemaFieldToDataset(defaultFlags)), "Expected all default values except fulltext"); assertEquals( @@ -129,7 +145,9 @@ public void testApplyDefaultSearchFlags() { .setSkipAggregates(true) .setSkipCache(false) .setMaxAggValues(1) - .setSkipHighlighting(true), + .setSkipHighlighting(true) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), SearchUtils.convertSchemaFieldToDataset(defaultFlags)), "Expected all default values except skipCache"); assertEquals( @@ -141,7 +159,9 @@ public void testApplyDefaultSearchFlags() { .setSkipAggregates(false) .setSkipCache(true) .setMaxAggValues(1) - .setSkipHighlighting(true), + .setSkipHighlighting(true) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), SearchUtils.convertSchemaFieldToDataset(defaultFlags)), "Expected all default values except skipAggregates"); assertEquals( @@ -153,7 +173,9 @@ public void testApplyDefaultSearchFlags() { .setSkipAggregates(true) .setSkipCache(true) .setMaxAggValues(2) - .setSkipHighlighting(true), + .setSkipHighlighting(true) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), SearchUtils.convertSchemaFieldToDataset(defaultFlags)), "Expected all default values except maxAggValues"); assertEquals( @@ -165,7 +187,9 @@ public void testApplyDefaultSearchFlags() { .setSkipAggregates(true) .setSkipCache(true) .setMaxAggValues(1) - .setSkipHighlighting(false), + .setSkipHighlighting(false) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), SearchUtils.convertSchemaFieldToDataset(defaultFlags)), "Expected all default values except skipHighlighting"); } @@ -184,7 +208,9 @@ public void testImmutableDefaults() throws CloneNotSupportedException { .setSkipCache(false) .setSkipAggregates(false) .setMaxAggValues(2) - .setSkipHighlighting(false), + .setSkipHighlighting(false) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), SearchUtils.convertSchemaFieldToDataset(defaultFlags)), "not empty", defaultFlags), @@ -194,7 +220,9 @@ 
public void testImmutableDefaults() throws CloneNotSupportedException { .setSkipAggregates(false) .setSkipCache(false) .setMaxAggValues(2) - .setSkipHighlighting(false), + .setSkipHighlighting(false) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), SearchUtils.convertSchemaFieldToDataset(defaultFlags)), "Expected no default values"); diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java index 24acb7bbcb4a7..d627bd52cace9 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java @@ -37,6 +37,7 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import com.linkedin.metadata.version.GitVersion; +import io.datahubproject.metadata.context.OperationContext; import io.datahubproject.test.search.config.SearchCommonTestConfiguration; import java.io.IOException; import java.util.Map; @@ -72,6 +73,8 @@ public class SampleDataFixtureConfiguration { @Autowired private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private OperationContext opContext; + @Bean(name = "sampleDataPrefix") protected String sampleDataPrefix() { return "smpldat"; @@ -297,6 +300,7 @@ private EntityClient entityClientHelper( PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); return new JavaEntityClient( + opContext, new EntityServiceImpl(mockAspectDao, null, entityRegistry, true, preProcessHooks, true), null, entitySearchService, diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java 
b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java index 1c43e623443c1..74971c0b41eb1 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java @@ -35,6 +35,7 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import com.linkedin.metadata.version.GitVersion; +import io.datahubproject.metadata.context.OperationContext; import io.datahubproject.test.search.config.SearchCommonTestConfiguration; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import java.io.IOException; @@ -62,6 +63,8 @@ public class SearchLineageFixtureConfiguration { @Autowired private CustomSearchConfiguration customSearchConfiguration; + @Autowired private OperationContext opContext; + @Bean(name = "searchLineagePrefix") protected String indexPrefix() { return "srchlin"; @@ -125,6 +128,7 @@ protected ElasticSearchService entitySearchService( ESWriteDAO writeDAO = new ESWriteDAO( aspectRetriever.getEntityRegistry(), searchClient, indexConvention, bulkProcessor, 1); + return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO) .postConstruct(aspectRetriever); } @@ -232,6 +236,7 @@ protected EntityClient entityClient( PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); return new JavaEntityClient( + opContext, new EntityServiceImpl(null, null, entityRegistry, true, preProcessHooks, true), null, entitySearchService, diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java index f3689f9b5d04a..5dceed80f6542 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java +++ 
b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java @@ -14,7 +14,6 @@ import com.linkedin.datahub.graphql.types.SearchableEntityType; import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.graph.LineageDirection; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.metadata.search.LineageSearchService; @@ -22,6 +21,7 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; @@ -54,94 +54,113 @@ public static void syncAfterWrite(ESBulkProcessor bulkProcessor) throws Interrup .collect(Collectors.toList()); } - public static SearchResult searchAcrossEntities(SearchService searchService, String query) { - return searchAcrossEntities(searchService, query, null); - } - - public static SearchResult searchAcrossEntities( - SearchService searchService, String query, @Nullable List facets) { - return searchService.searchAcrossEntities( - SEARCHABLE_ENTITIES, - query, - null, - null, - 0, - 100, - new SearchFlags().setFulltext(true).setSkipCache(true), - facets); + public static SearchResult facetAcrossEntities( + OperationContext opContext, + SearchService searchService, + String query, + @Nullable List facets) { + return facetAcrossEntities(opContext, searchService, SEARCHABLE_ENTITIES, query, facets, null); } - public static SearchResult searchAcrossEntities( + public static SearchResult facetAcrossEntities( + OperationContext opContext, SearchService searchService, + List entityNames, String query, @Nullable List facets, - Filter filter, - List entityNames) { + @Nullable Filter filter) { return 
searchService.searchAcrossEntities( + opContext.withSearchFlags(flags -> flags.setFulltext(true).setSkipCache(true)), entityNames, query, filter, null, 0, 100, - new SearchFlags().setFulltext(true).setSkipCache(true), facets); } - public static SearchResult searchAcrossCustomEntities( - SearchService searchService, String query, List searchableEntities) { + public static SearchResult searchAcrossEntities( + OperationContext opContext, SearchService searchService, String query) { + return searchAcrossEntities(opContext, searchService, SEARCHABLE_ENTITIES, query, null); + } + + public static SearchResult searchAcrossEntities( + OperationContext opContext, + SearchService searchService, + List entityNames, + String query) { + return searchAcrossEntities(opContext, searchService, entityNames, query, null); + } + + public static SearchResult searchAcrossEntities( + OperationContext opContext, + SearchService searchService, + List entityNames, + String query, + Filter filter) { return searchService.searchAcrossEntities( - searchableEntities, + opContext.withSearchFlags(flags -> flags.setFulltext(true).setSkipCache(true)), + entityNames, query, - null, + filter, null, 0, 100, - new SearchFlags().setFulltext(true).setSkipCache(true)); + null); } - public static SearchResult search(SearchService searchService, String query) { - return search(searchService, SEARCHABLE_ENTITIES, query); + public static SearchResult search( + OperationContext opContext, SearchService searchService, String query) { + return search(opContext, searchService, SEARCHABLE_ENTITIES, query); } public static SearchResult search( - SearchService searchService, List entities, String query) { + OperationContext opContext, + SearchService searchService, + List entities, + String query) { return searchService.search( + opContext.withSearchFlags(flags -> flags.setFulltext(true).setSkipCache(true)), entities, query, null, null, 0, - 100, - new SearchFlags().setFulltext(true).setSkipCache(true)); + 100); } 
public static ScrollResult scroll( - SearchService searchService, String query, int batchSize, @Nullable String scrollId) { + OperationContext opContext, + SearchService searchService, + String query, + int batchSize, + @Nullable String scrollId) { return searchService.scrollAcrossEntities( + opContext.withSearchFlags(flags -> flags.setFulltext(true).setSkipCache(true)), SEARCHABLE_ENTITIES, query, null, null, scrollId, "3m", - batchSize, - new SearchFlags().setFulltext(true).setSkipCache(true)); + batchSize); } - public static SearchResult searchStructured(SearchService searchService, String query) { + public static SearchResult searchStructured( + OperationContext opContext, SearchService searchService, String query) { return searchService.searchAcrossEntities( + opContext.withSearchFlags(flags -> flags.setFulltext(false).setSkipCache(true)), SEARCHABLE_ENTITIES, query, null, null, 0, - 100, - new SearchFlags().setFulltext(false).setSkipCache(true)); + 100); } public static LineageSearchResult lineage( - LineageSearchService lineageSearchService, Urn root, int hops) { + OperationContext opContext, LineageSearchService lineageSearchService, Urn root, int hops) { String degree = hops >= 3 ? 
"3+" : String.valueOf(hops); List filters = List.of( @@ -153,6 +172,7 @@ public static LineageSearchResult lineage( .build()); return lineageSearchService.searchAcrossLineage( + opContext.withSearchFlags(flags -> flags.setSkipCache(true)), root, LineageDirection.DOWNSTREAM, SEARCHABLE_ENTITY_TYPES.stream() @@ -165,12 +185,14 @@ public static LineageSearchResult lineage( 0, 100, null, - null, - new SearchFlags().setSkipCache(true)); + null); } public static AutoCompleteResults autocomplete( - SearchableEntityType searchableEntityType, String query) throws Exception { + OperationContext opContext, + SearchableEntityType searchableEntityType, + String query) + throws Exception { return searchableEntityType.autoComplete( query, null, @@ -191,6 +213,11 @@ public Authentication getAuthentication() { public Authorizer getAuthorizer() { return null; } + + @Override + public OperationContext getOperationContext() { + return opContext; + } }); } diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchCommonTestConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchCommonTestConfiguration.java index ae81eaf1ef388..8df86bef250d3 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchCommonTestConfiguration.java +++ b/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchCommonTestConfiguration.java @@ -18,6 +18,8 @@ import com.linkedin.metadata.models.registry.EntityRegistryException; import com.linkedin.metadata.models.registry.SnapshotEntityRegistry; import com.linkedin.r2.RemoteInvocationException; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.net.URISyntaxException; import java.util.Map; import org.springframework.boot.test.context.TestConfiguration; @@ -88,4 +90,9 @@ protected AspectRetriever snapshotRegistryAspectRetriever() 
when(aspectRetriever.getLatestAspectObjects(any(), any())).thenReturn(Map.of()); return aspectRetriever; } + + @Bean(name = "systemOperationContext") + public OperationContext systemOperationContext(final EntityRegistry entityRegistry) { + return TestOperationContexts.systemContextNoSearchAuthorization(entityRegistry); + } } diff --git a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java index 018646fad07fc..2b2643f088da4 100644 --- a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java +++ b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java @@ -24,7 +24,8 @@ "com.linkedin.gms.factory.form", "com.linkedin.gms.factory.incident", "com.linkedin.gms.factory.timeline.eventgenerator", - "io.datahubproject.metadata.jobs.common.health.kafka" + "io.datahubproject.metadata.jobs.common.health.kafka", + "com.linkedin.gms.factory.context" }, excludeFilters = { @ComponentScan.Filter( diff --git a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java index 22fbe7fc6b6ca..68965e5bde261 100644 --- a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java +++ b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java @@ -3,6 +3,7 @@ import static org.testng.AssertJUnit.*; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.service.FormService; import io.datahubproject.metadata.jobs.common.health.kafka.KafkaHealthIndicator; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; @@ -15,13 
+16,16 @@ classes = {MaeConsumerApplication.class, MaeConsumerApplicationTestConfiguration.class}) public class MaeConsumerApplicationTest extends AbstractTestNGSpringContextTests { - @Autowired private EntityService _mockEntityService; + @Autowired private EntityService mockEntityService; @Autowired private KafkaHealthIndicator kafkaHealthIndicator; + @Autowired private FormService formService; + @Test public void testMaeConsumerAutoWiring() { - assertNotNull(_mockEntityService); + assertNotNull(mockEntityService); assertNotNull(kafkaHealthIndicator); + assertNotNull(formService); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java index 7a1aaa7f6a056..b212eb11e50c0 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java @@ -23,7 +23,6 @@ import com.linkedin.metadata.kafka.hook.MetadataChangeLogHook; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -38,6 +37,7 @@ import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; +import io.datahubproject.metadata.context.OperationContext; import java.net.URISyntaxException; import java.util.List; import java.util.stream.Collectors; @@ -71,20 +71,22 @@ public class SiblingAssociationHook implements MetadataChangeLogHook { public static final String SOURCE_SUBTYPE_V1 = "source"; 
public static final String SOURCE_SUBTYPE_V2 = "Source"; - private final EntityRegistry _entityRegistry; - private final SystemEntityClient _entityClient; - private final EntitySearchService _searchService; + private final EntityRegistry entityRegistry; + private final SystemEntityClient systemEntityClient; + private final EntitySearchService entitySearchService; private final boolean _isEnabled; + private final OperationContext opContext; @Autowired public SiblingAssociationHook( - @Nonnull final EntityRegistry entityRegistry, - @Nonnull final SystemEntityClient entityClient, + @Nonnull final OperationContext opContext, + @Nonnull final SystemEntityClient systemEntityClient, @Nonnull final EntitySearchService searchService, @Nonnull @Value("${siblings.enabled:true}") Boolean isEnabled) { - _entityRegistry = entityRegistry; - _entityClient = entityClient; - _searchService = searchService; + this.opContext = opContext; + this.entityRegistry = opContext.getEntityRegistryContext().getEntityRegistry(); + this.systemEntityClient = systemEntityClient; + entitySearchService = searchService; _isEnabled = isEnabled; } @@ -135,14 +137,16 @@ public void invoke(@Nonnull MetadataChangeLog event) { private void handleEntityKeyEvent(DatasetUrn datasetUrn) { Filter entitiesWithYouAsSiblingFilter = createFilterForEntitiesWithYouAsSibling(datasetUrn); final SearchResult searchResult = - _searchService.search( + entitySearchService.search( + opContext.withSearchFlags( + flags -> + flags.setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)), List.of(DATASET_ENTITY_NAME), "*", entitiesWithYouAsSiblingFilter, null, 0, - 10, - new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)); + 10); // we have a match of an entity with you as a sibling, associate yourself back searchResult @@ -265,7 +269,7 @@ private void setSiblingsAndSoftDeleteSibling(Urn dbtUrn, Urn sourceUrn) { dbtSiblingProposal.setEntityUrn(dbtUrn); try { - 
_entityClient.ingestProposal(dbtSiblingProposal, true); + systemEntityClient.ingestProposal(dbtSiblingProposal, true); } catch (RemoteInvocationException e) { log.error("Error while associating {} with {}: {}", dbtUrn, sourceUrn, e.toString()); throw new RuntimeException("Error ingesting sibling proposal. Skipping processing.", e); @@ -290,7 +294,7 @@ private void setSiblingsAndSoftDeleteSibling(Urn dbtUrn, Urn sourceUrn) { .filter( urn -> { try { - return _entityClient.exists(urn); + return systemEntityClient.exists(urn); } catch (RemoteInvocationException e) { log.error("Error while checking existence of {}: {}", urn, e.toString()); throw new RuntimeException("Error checking existence. Skipping processing.", e); @@ -312,7 +316,7 @@ private void setSiblingsAndSoftDeleteSibling(Urn dbtUrn, Urn sourceUrn) { sourceSiblingProposal.setEntityUrn(sourceUrn); try { - _entityClient.ingestProposal(sourceSiblingProposal, true); + systemEntityClient.ingestProposal(sourceSiblingProposal, true); } catch (RemoteInvocationException e) { log.error("Error while associating {} with {}: {}", dbtUrn, sourceUrn, e.toString()); throw new RuntimeException("Error ingesting sibling proposal. 
Skipping processing.", e); @@ -338,7 +342,7 @@ private boolean isEligibleForProcessing(final MetadataChangeLog event) { private Urn getUrnFromEvent(final MetadataChangeLog event) { EntitySpec entitySpec; try { - entitySpec = _entityRegistry.getEntitySpec(event.getEntityType()); + entitySpec = entityRegistry.getEntitySpec(event.getEntityType()); } catch (IllegalArgumentException e) { log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString()); throw new RuntimeException( @@ -359,7 +363,7 @@ private UpstreamLineage getUpstreamLineageFromEvent(final MetadataChangeLog even } try { - entitySpec = _entityRegistry.getEntitySpec(event.getEntityType()); + entitySpec = entityRegistry.getEntitySpec(event.getEntityType()); } catch (IllegalArgumentException e) { log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString()); throw new RuntimeException( @@ -383,7 +387,7 @@ private SubTypes getSubtypesFromEvent(final MetadataChangeLog event) { } try { - entitySpec = _entityRegistry.getEntitySpec(event.getEntityType()); + entitySpec = entityRegistry.getEntitySpec(event.getEntityType()); } catch (IllegalArgumentException e) { log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString()); throw new RuntimeException( @@ -427,7 +431,7 @@ private Filter createFilterForEntitiesWithYouAsSibling(final Urn entityUrn) { private SubTypes getSubtypesFromEntityClient(final Urn urn) { try { EntityResponse entityResponse = - _entityClient.getV2(urn, ImmutableSet.of(SUB_TYPES_ASPECT_NAME)); + systemEntityClient.getV2(urn, ImmutableSet.of(SUB_TYPES_ASPECT_NAME)); if (entityResponse != null && entityResponse.hasAspects() @@ -445,7 +449,7 @@ private SubTypes getSubtypesFromEntityClient(final Urn urn) { private UpstreamLineage getUpstreamLineageFromEntityClient(final Urn urn) { try { EntityResponse entityResponse = - _entityClient.getV2(urn, ImmutableSet.of(UPSTREAM_LINEAGE_ASPECT_NAME)); + 
systemEntityClient.getV2(urn, ImmutableSet.of(UPSTREAM_LINEAGE_ASPECT_NAME)); if (entityResponse != null && entityResponse.hasAspects() @@ -467,7 +471,7 @@ private UpstreamLineage getUpstreamLineageFromEntityClient(final Urn urn) { private Siblings getSiblingsFromEntityClient(final Urn urn) { try { EntityResponse entityResponse = - _entityClient.getV2(urn, ImmutableSet.of(SIBLINGS_ASPECT_NAME)); + systemEntityClient.getV2(urn, ImmutableSet.of(SIBLINGS_ASPECT_NAME)); if (entityResponse != null && entityResponse.hasAspects() diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java index 3823668adeace..2a83a2310518f 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java @@ -2,6 +2,7 @@ import static com.linkedin.metadata.Constants.*; import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.when; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -26,7 +27,6 @@ import com.linkedin.metadata.key.DatasetKey; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; @@ -34,6 +34,8 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.MetadataChangeProposal; +import io.datahubproject.metadata.context.OperationContext; +import 
io.datahubproject.test.metadata.context.TestOperationContexts; import java.net.URISyntaxException; import org.mockito.Mockito; import org.testng.annotations.BeforeMethod; @@ -43,6 +45,7 @@ public class SiblingAssociationHookTest { private SiblingAssociationHook _siblingAssociationHook; SystemEntityClient _mockEntityClient; EntitySearchService _mockSearchService; + OperationContext opContext; @BeforeMethod public void setupTest() { @@ -53,8 +56,9 @@ public void setupTest() { .getResourceAsStream("test-entity-registry-siblings.yml")); _mockEntityClient = Mockito.mock(SystemEntityClient.class); _mockSearchService = Mockito.mock(EntitySearchService.class); + opContext = TestOperationContexts.systemContextNoSearchAuthorization(registry); _siblingAssociationHook = - new SiblingAssociationHook(registry, _mockEntityClient, _mockSearchService, true); + new SiblingAssociationHook(opContext, _mockEntityClient, _mockSearchService, true); _siblingAssociationHook.setEnabled(true); } @@ -69,13 +73,12 @@ public void testInvokeWhenThereIsAPairWithDbtSourceNode() throws Exception { EntityResponse mockResponse = new EntityResponse(); mockResponse.setAspects(mockResponseMap); - Mockito.when(_mockEntityClient.exists(Mockito.any())).thenReturn(true); + when(_mockEntityClient.exists(Mockito.any())).thenReturn(true); - Mockito.when( - _mockEntityClient.getV2( - Urn.createFromString( - "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"), - ImmutableSet.of(SUB_TYPES_ASPECT_NAME))) + when(_mockEntityClient.getV2( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"), + ImmutableSet.of(SUB_TYPES_ASPECT_NAME))) .thenReturn(mockResponse); MetadataChangeLog event = @@ -145,7 +148,7 @@ public void testInvokeWhenThereIsNoPairWithDbtModel() throws Exception { SubTypes mockSourceSubtypesAspect = new SubTypes(); mockSourceSubtypesAspect.setTypeNames(new StringArray(ImmutableList.of("model"))); - 
Mockito.when(_mockEntityClient.exists(Mockito.any())).thenReturn(true); + when(_mockEntityClient.exists(Mockito.any())).thenReturn(true); EnvelopedAspectMap mockResponseMap = new EnvelopedAspectMap(); mockResponseMap.put( @@ -154,13 +157,12 @@ public void testInvokeWhenThereIsNoPairWithDbtModel() throws Exception { EntityResponse mockResponse = new EntityResponse(); mockResponse.setAspects(mockResponseMap); - Mockito.when(_mockEntityClient.exists(Mockito.any())).thenReturn(true); + when(_mockEntityClient.exists(Mockito.any())).thenReturn(true); - Mockito.when( - _mockEntityClient.getV2( - Urn.createFromString( - "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"), - ImmutableSet.of(SUB_TYPES_ASPECT_NAME))) + when(_mockEntityClient.getV2( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"), + ImmutableSet.of(SUB_TYPES_ASPECT_NAME))) .thenReturn(mockResponse); MetadataChangeLog event = @@ -206,7 +208,7 @@ public void testInvokeWhenThereIsNoPairWithDbtModel() throws Exception { @Test public void testInvokeWhenThereIsAPairWithBigqueryDownstreamNode() throws Exception { - Mockito.when(_mockEntityClient.exists(Mockito.any())).thenReturn(true); + when(_mockEntityClient.exists(Mockito.any())).thenReturn(true); MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); @@ -271,7 +273,7 @@ public void testInvokeWhenThereIsAPairWithBigqueryDownstreamNode() throws Except @Test public void testInvokeWhenThereIsAKeyBeingReingested() throws Exception { - Mockito.when(_mockEntityClient.exists(Mockito.any())).thenReturn(true); + when(_mockEntityClient.exists(Mockito.any())).thenReturn(true); SearchResult returnSearchResult = new SearchResult(); SearchEntityArray returnEntityArray = new SearchEntityArray(); @@ -283,19 +285,8 @@ public void testInvokeWhenThereIsAKeyBeingReingested() throws Exception { returnSearchResult.setEntities(returnEntityArray); - 
Mockito.when( - _mockSearchService.search( - any(), - anyString(), - any(), - any(), - anyInt(), - anyInt(), - eq( - new SearchFlags() - .setFulltext(false) - .setSkipAggregates(true) - .setSkipHighlighting(true)))) + when(_mockSearchService.search( + any(OperationContext.class), any(), anyString(), any(), any(), anyInt(), anyInt())) .thenReturn(returnSearchResult); MetadataChangeLog event = diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java index a80017a0956b2..789918a4b164c 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java @@ -19,6 +19,10 @@ import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import com.linkedin.metadata.utils.elasticsearch.IndexConvention; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.context.OperationContextConfig; +import io.datahubproject.test.metadata.context.TestOperationContexts; import org.apache.avro.generic.GenericRecord; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.test.mock.mockito.MockBean; @@ -80,4 +84,18 @@ public SystemEntityClient systemEntityClient( @MockBean public SchemaRegistryService schemaRegistryService; @MockBean public EntityIndexBuilders entityIndexBuilders; + + @Bean(name = "systemOperationContext") + public OperationContext operationContext( + final EntityRegistry entityRegistry, + @Qualifier("systemAuthentication") final Authentication systemAuthentication, + final IndexConvention indexConvention) { + 
when(systemAuthentication.getActor()) + .thenReturn(TestOperationContexts.TEST_SYSTEM_AUTH.getActor()); + return OperationContext.asSystem( + OperationContextConfig.builder().build(), + entityRegistry, + systemAuthentication, + indexConvention); + } } diff --git a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java index 1210bf37059b4..0b0c8f622efd2 100644 --- a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java +++ b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java @@ -32,7 +32,8 @@ "com.linkedin.metadata.dao.producer", "com.linkedin.gms.factory.form", "com.linkedin.metadata.dao.producer", - "io.datahubproject.metadata.jobs.common.health.kafka" + "io.datahubproject.metadata.jobs.common.health.kafka", + "com.linkedin.gms.factory.context" }, excludeFilters = { @ComponentScan.Filter( diff --git a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java index 93a6ae8fb4797..ce093e3115f8e 100644 --- a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java +++ b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java @@ -1,10 +1,10 @@ package com.linkedin.metadata.kafka; -import com.datahub.authentication.Authentication; import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.entity.client.SystemRestliEntityClient; import com.linkedin.gms.factory.auth.SystemAuthenticationFactory; import com.linkedin.gms.factory.config.ConfigurationProvider; +import 
com.linkedin.gms.factory.context.SystemOperationContextFactory; import com.linkedin.metadata.dao.producer.KafkaHealthChecker; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.SiblingGraphService; @@ -15,6 +15,7 @@ import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.parseq.retry.backoff.ExponentialBackoff; import com.linkedin.restli.client.Client; +import io.datahubproject.metadata.context.OperationContext; import io.ebean.Database; import java.net.URI; import org.springframework.beans.factory.annotation.Autowired; @@ -27,7 +28,7 @@ import org.springframework.context.annotation.Primary; @TestConfiguration -@Import(value = {SystemAuthenticationFactory.class}) +@Import(value = {SystemAuthenticationFactory.class, SystemOperationContextFactory.class}) public class MceConsumerApplicationTestConfiguration { @Autowired private TestRestTemplate restTemplate; @@ -39,15 +40,15 @@ public class MceConsumerApplicationTestConfiguration { @Bean @Primary public SystemEntityClient systemEntityClient( - @Qualifier("configurationProvider") final ConfigurationProvider configurationProvider, - @Qualifier("systemAuthentication") final Authentication systemAuthentication) { + @Qualifier("systemOperationContext") final OperationContext systemOperationContext, + @Qualifier("configurationProvider") final ConfigurationProvider configurationProvider) { String selfUri = restTemplate.getRootUri(); final Client restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(selfUri), null); return new SystemRestliEntityClient( + systemOperationContext, restClient, new ExponentialBackoff(1), 1, - systemAuthentication, configurationProvider.getCache().getClient().getEntityClient()); } diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/Ownership.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/Ownership.pdl index cee882762814e..d76b60c061b4b 100644 --- 
a/metadata-models/src/main/pegasus/com/linkedin/common/Ownership.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/common/Ownership.pdl @@ -13,6 +13,17 @@ record Ownership { */ owners: array[Owner] + /** + * Ownership type to Owners map, populated via mutation hook. + */ + @Searchable = { + "/*": { + "fieldType": "MAP_ARRAY", + "queryByDefault": false + } + } + ownerTypes: optional map[string, array[Urn]] = {} + /** * Audit stamp containing who last modified the record and when. A value of 0 in the time field indicates missing data. */ diff --git a/metadata-models/src/main/pegasus/com/linkedin/metadata/query/SearchFlags.pdl b/metadata-models/src/main/pegasus/com/linkedin/metadata/query/SearchFlags.pdl index 67f41ea175b51..355a8bb7a5cb3 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/metadata/query/SearchFlags.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/metadata/query/SearchFlags.pdl @@ -38,4 +38,14 @@ record SearchFlags { * Instructions for grouping results before returning */ groupingSpec: optional GroupingSpec + + /** + * include soft deleted entities in results + */ + includeSoftDeleted:optional boolean = false + + /** + * include restricted entities in results (default is to filter) + */ + includeRestricted:optional boolean = false } diff --git a/metadata-models/src/main/pegasus/com/linkedin/metadata/search/SearchEntity.pdl b/metadata-models/src/main/pegasus/com/linkedin/metadata/search/SearchEntity.pdl index 1010e3e2330cc..df457c1ba26f9 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/metadata/search/SearchEntity.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/metadata/search/SearchEntity.pdl @@ -29,4 +29,10 @@ record SearchEntity { features: optional map[string, double] score: optional double + + /** + * A list of the the restricted aspects on the entity. + * If the key aspect is present, assume ALL aspects should be restricted including the entity's Urn. 
+ */ + restrictedAspects: optional array[string] } \ No newline at end of file diff --git a/metadata-models/src/main/resources/entity-registry.yml b/metadata-models/src/main/resources/entity-registry.yml index cf8eb738e2443..22fe6551eb528 100644 --- a/metadata-models/src/main/resources/entity-registry.yml +++ b/metadata-models/src/main/resources/entity-registry.yml @@ -574,4 +574,13 @@ plugins: enabled: true supportedEntityAspectNames: - entityName: '*' - aspectName: structuredProperties \ No newline at end of file + aspectName: structuredProperties + - className: 'com.linkedin.metadata.aspect.hooks.OwnerTypeMap' + enabled: true + supportedOperations: + - UPSERT + - CREATE + - RESTATE + supportedEntityAspectNames: + - entityName: '*' + aspectName: ownership \ No newline at end of file diff --git a/metadata-operation-context/build.gradle b/metadata-operation-context/build.gradle new file mode 100644 index 0000000000000..1be98cb0140f3 --- /dev/null +++ b/metadata-operation-context/build.gradle @@ -0,0 +1,15 @@ +plugins { + id 'java-library' +} + +dependencies { + api project(':metadata-utils') + api project(':metadata-auth:auth-api') + + implementation externalDependency.slf4jApi + compileOnly externalDependency.lombok + + annotationProcessor externalDependency.lombok + testImplementation externalDependency.testng + testImplementation externalDependency.mockito +} \ No newline at end of file diff --git a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/ActorContext.java b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/ActorContext.java new file mode 100644 index 0000000000000..6c8077923e67f --- /dev/null +++ b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/ActorContext.java @@ -0,0 +1,71 @@ +package io.datahubproject.metadata.context; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import 
com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.policy.DataHubPolicyInfo; +import java.util.Collection; +import java.util.Collections; +import java.util.Optional; +import java.util.Set; +import lombok.Builder; +import lombok.Getter; + +@Builder +@Getter +public class ActorContext implements ContextInterface { + + public static ActorContext asSystem(Authentication systemAuthentication) { + return ActorContext.builder().systemAuth(true).authentication(systemAuthentication).build(); + } + + public static ActorContext asSessionRestricted( + Authentication authentication, + Set dataHubPolicySet, + Collection groupMembership) { + return ActorContext.builder() + .systemAuth(false) + .authentication(authentication) + .policyInfoSet(dataHubPolicySet) + .groupMembership(groupMembership) + .build(); + } + + private final Authentication authentication; + @Builder.Default private final Set policyInfoSet = Collections.emptySet(); + @Builder.Default private final Collection groupMembership = Collections.emptyList(); + private final boolean systemAuth; + + public Urn getActorUrn() { + return UrnUtils.getUrn(authentication.getActor().toUrnStr()); + } + + /** + * The current implementation creates a cache entry unique for the set of policies. + * + *

We are relying on the consistent hash code implementation of String and the consistent + * conversion of the policy into a String + * + * @return + */ + @Override + public Optional getCacheKeyComponent() { + return Optional.of( + policyInfoSet.stream() + .filter(policy -> PoliciesConfig.ACTIVE_POLICY_STATE.equals(policy.getState())) + .mapToInt( + policy -> { + if (policy.getActors().hasResourceOwners() + || policy.getActors().hasResourceOwnersTypes()) { + // results are based on actor, distinct() added to remove duplicate sums of + // multiple owner policies + return authentication.getActor().toUrnStr().hashCode(); + } else { + return policy.toString().hashCode(); + } + }) + .distinct() + .sum()); + } +} diff --git a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/AuthorizerContext.java b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/AuthorizerContext.java new file mode 100644 index 0000000000000..fdd84f6d64557 --- /dev/null +++ b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/AuthorizerContext.java @@ -0,0 +1,28 @@ +package io.datahubproject.metadata.context; + +import com.datahub.plugins.auth.authorization.Authorizer; +import java.util.Optional; +import javax.annotation.Nonnull; +import lombok.Builder; +import lombok.Getter; + +@Builder +@Getter +public class AuthorizerContext implements ContextInterface { + + public static final AuthorizerContext EMPTY = + AuthorizerContext.builder().authorizer(Authorizer.EMPTY).build(); + + @Nonnull private final Authorizer authorizer; + + /** + * No need to consider the authorizer in the cache context since it is ultimately determined by + * the underlying search context + * + * @return + */ + @Override + public Optional getCacheKeyComponent() { + return Optional.empty(); + } +} diff --git a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/ContextInterface.java 
b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/ContextInterface.java new file mode 100644 index 0000000000000..6e02f273cbb21 --- /dev/null +++ b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/ContextInterface.java @@ -0,0 +1,21 @@ +package io.datahubproject.metadata.context; + +import java.util.Optional; + +public interface ContextInterface { + /** + * Caching layers must take into account the operation's context to avoid returning incorrect or + * restricted results. + * + *

A consistent hash must be produced in a distributed cache so that multiple jvms produce the + * same keys for the same objects within the same context. This generally rules out hashCode() + * since those are not guaranteed properties. + * + *

We are however leveraging the special case of hashCode() for String which should be + * consistent across jvms and executions. + * + *

The overall context must produce a unique id to be included with cache keys. Each component + * of the OperationContext must produce a unique identifier to be used for this purpose. + */ + Optional getCacheKeyComponent(); +} diff --git a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/EntityRegistryContext.java b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/EntityRegistryContext.java new file mode 100644 index 0000000000000..c026a453604ee --- /dev/null +++ b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/EntityRegistryContext.java @@ -0,0 +1,22 @@ +package io.datahubproject.metadata.context; + +import com.linkedin.metadata.models.registry.EntityRegistry; +import java.util.Optional; +import javax.annotation.Nullable; +import lombok.Builder; +import lombok.Getter; + +@Builder +@Getter +public class EntityRegistryContext implements ContextInterface { + public static EntityRegistryContext EMPTY = EntityRegistryContext.builder().build(); + + @Nullable private final EntityRegistry entityRegistry; + + @Override + public Optional getCacheKeyComponent() { + return entityRegistry == null + ? 
Optional.empty() + : Optional.ofNullable(entityRegistry.getIdentifier()).map(String::hashCode); + } +} diff --git a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/OperationContext.java b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/OperationContext.java new file mode 100644 index 0000000000000..d2c038c26e325 --- /dev/null +++ b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/OperationContext.java @@ -0,0 +1,285 @@ +package io.datahubproject.metadata.context; + +import com.datahub.authentication.Authentication; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.utils.AuditStampUtils; +import com.linkedin.metadata.utils.elasticsearch.IndexConvention; +import java.util.Collection; +import java.util.Objects; +import java.util.Optional; +import java.util.function.Function; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.Builder; +import lombok.Getter; + +/** + * These contexts define a read/write context which allows more flexibility when reading and writing + * to various data stores. This context can be considered per **operation** and allows for + * supporting database read replicas, mirroring or sharding across multiple databases/elasticsearch + * instances, and separation of data at the storage level. + * + *

Different operations might also include different EntityRegistries + * + *

An integral part of the operation's context is additionally the user's identity and this + * context encompasses the `Authentication` context. + */ +@Builder(toBuilder = true) +@Getter +public class OperationContext { + + /** + * This should be the primary entry point when a request is made to Rest.li, OpenAPI, Graphql or + * other service layers. + * + *

Copy the context from a system level context to a specific request/user context. Inheriting + * all other contexts except for the sessionActor. Consider this a down leveling of the access. + * + *

This allows the context to contain system context such as elasticsearch and database + * contexts which are inherited from the system. + * + * @param systemOperationContext the base operation context + * @param sessionAuthentication the lower level authentication + * @param allowSystemAuthentication whether the context is allowed to escalate as needed + * @return the new context + */ + public static OperationContext asSession( + OperationContext systemOperationContext, + @Nonnull Authorizer authorizer, + @Nonnull Authentication sessionAuthentication, + boolean allowSystemAuthentication) { + return systemOperationContext.toBuilder() + .operationContextConfig( + // update allowed system authentication + systemOperationContext.getOperationContextConfig().toBuilder() + .allowSystemAuthentication(allowSystemAuthentication) + .build()) + .authorizerContext(AuthorizerContext.builder().authorizer(authorizer).build()) + .build(sessionAuthentication); + } + + /** + * Apply a set of default flags on top of any existing search flags + * + * @param opContext + * @param flagDefaults + * @return + */ + public static OperationContext withSearchFlags( + OperationContext opContext, Function flagDefaults) { + + return opContext.toBuilder() + // update search flags for the request's session + .searchContext(opContext.getSearchContext().withFlagDefaults(flagDefaults)) + .build(opContext.getSessionAuthentication()); + } + + /** + * Set the system authentication object AND allow escalation of privilege for the session. This + * OperationContext typically serves the default. + * + *

If you'd like to set the system authentication but not allow escalation, use the + * systemActorContext() directly which does not reconfigure the escalation configuration. + * + * @param systemAuthentication the system authentication + * @return builder + */ + public static OperationContext asSystem( + @Nonnull OperationContextConfig config, + @Nonnull EntityRegistry entityRegistry, + @Nonnull Authentication systemAuthentication, + @Nonnull IndexConvention indexConvention) { + + ActorContext systemActorContext = + ActorContext.builder().systemAuth(true).authentication(systemAuthentication).build(); + OperationContextConfig systemConfig = + config.toBuilder().allowSystemAuthentication(true).build(); + SearchContext systemSearchContext = + SearchContext.builder().indexConvention(indexConvention).build(); + + return OperationContext.builder() + .operationContextConfig(systemConfig) + .systemActorContext(systemActorContext) + .searchContext(systemSearchContext) + .entityRegistryContext( + EntityRegistryContext.builder().entityRegistry(entityRegistry).build()) + // Authorizer.EMPTY doesn't actually apply to system auth + .authorizerContext(AuthorizerContext.builder().authorizer(Authorizer.EMPTY).build()) + .build(systemAuthentication); + } + + @Nonnull private final OperationContextConfig operationContextConfig; + @Nonnull private final ActorContext sessionActorContext; + @Nullable private final ActorContext systemActorContext; + @Nonnull private final SearchContext searchContext; + @Nonnull private final AuthorizerContext authorizerContext; + @Nonnull private final EntityRegistryContext entityRegistryContext; + + public OperationContext withSearchFlags( + @Nonnull Function flagDefaults) { + return OperationContext.withSearchFlags(this, flagDefaults); + } + + public OperationContext asSession( + @Nonnull Authorizer authorizer, @Nonnull Authentication sessionAuthentication) { + return OperationContext.asSession( + this, + authorizer, + sessionAuthentication, + 
getOperationContextConfig().isAllowSystemAuthentication()); + } + + @Nonnull + public EntityRegistry getEntityRegistry() { + return getEntityRegistryContext().getEntityRegistry(); + } + + /** + * Requests for a generic authentication should return the system first if allowed. + * + * @return an entity client + */ + @Nonnull + public ActorContext getActorContext() { + if (operationContextConfig.isAllowSystemAuthentication() && systemActorContext != null) { + return systemActorContext; + } else { + return sessionActorContext; + } + } + + /** + * Other users within the same group as the actor + * + * @return + */ + public Collection getActorPeers() { + return authorizerContext.getAuthorizer().getActorPeers(sessionActorContext.getActorUrn()); + } + + /** + * Whether default authentication is system level + * + * @return + */ + public boolean isSystemAuth() { + return operationContextConfig.isAllowSystemAuthentication() + && sessionActorContext.isSystemAuth(); + } + + /** + * Requests for a generic authentication should return the system first if allowed. + * + * @return an entity client + */ + public Authentication getAuthentication() { + return getActorContext().getAuthentication(); + } + + public Authentication getSessionAuthentication() { + return sessionActorContext.getAuthentication(); + } + + public Optional getSystemAuthentication() { + return Optional.ofNullable(systemActorContext).map(ActorContext::getAuthentication); + } + + /** AuditStamp prefer session authentication */ + public AuditStamp getAuditStamp(@Nullable Long currentTimeMs) { + return AuditStampUtils.getAuditStamp( + UrnUtils.getUrn(sessionActorContext.getAuthentication().getActor().toUrnStr()), + currentTimeMs); + } + + public AuditStamp getAuditStamp() { + return getAuditStamp(null); + } + + /** + * Return a unique id for this context. Typically useful for building cache keys. 
We combine the + * different context components to create a single string representation of the hashcode across + * the contexts. + * + *

The overall context id can be comprised of one or more other contexts depending on the + * requirements. + * + * @return id representing this context instance's unique identifier + */ + public String getGlobalContextId() { + return String.valueOf( + ImmutableSet.builder() + .add(getOperationContextConfig()) + .add(getAuthorizerContext()) + .add(getActorContext()) + .add(getSearchContext()) + .add(getEntityRegistryContext()) + .build() + .stream() + .map(ContextInterface::getCacheKeyComponent) + .filter(Optional::isPresent) + .mapToInt(Optional::get) + .sum()); + } + + // Context id specific to contexts which impact search responses + public String getSearchContextId() { + return String.valueOf( + ImmutableSet.builder() + .add(getOperationContextConfig()) + .add(getActorContext()) + .add(getSearchContext()) + .add(getEntityRegistryContext()) + .build() + .stream() + .map(ContextInterface::getCacheKeyComponent) + .filter(Optional::isPresent) + .mapToInt(Optional::get) + .sum()); + } + + // Context id specific to entity lookups (not search) + public String getEntityContextId() { + return String.valueOf( + ImmutableSet.builder() + .add(getOperationContextConfig()) + .add(getActorContext()) + .add(getEntityRegistryContext()) + .build() + .stream() + .map(ContextInterface::getCacheKeyComponent) + .filter(Optional::isPresent) + .mapToInt(Optional::get) + .sum()); + } + + public static class OperationContextBuilder { + + public OperationContext build(@Nonnull Authentication sessionAuthentication) { + final Urn actorUrn = UrnUtils.getUrn(sessionAuthentication.getActor().toUrnStr()); + return new OperationContext( + this.operationContextConfig, + ActorContext.builder() + .authentication(sessionAuthentication) + .systemAuth( + this.systemActorContext != null + && this.systemActorContext.getAuthentication().equals(sessionAuthentication)) + .policyInfoSet(this.authorizerContext.getAuthorizer().getActorPolicies(actorUrn)) + 
.groupMembership(this.authorizerContext.getAuthorizer().getActorGroups(actorUrn)) + .build(), + this.systemActorContext, + Objects.requireNonNull(this.searchContext), + Objects.requireNonNull(this.authorizerContext), + Objects.requireNonNull(this.entityRegistryContext)); + } + + private OperationContext build() { + return null; + } + } +} diff --git a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/OperationContextConfig.java b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/OperationContextConfig.java new file mode 100644 index 0000000000000..f0e12f5a0ce2b --- /dev/null +++ b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/OperationContextConfig.java @@ -0,0 +1,24 @@ +package io.datahubproject.metadata.context; + +import com.datahub.authorization.config.SearchAuthorizationConfiguration; +import java.util.Optional; +import lombok.Builder; +import lombok.Getter; + +@Builder(toBuilder = true) +@Getter +public class OperationContextConfig implements ContextInterface { + /** + * Whether the given session authentication is allowed to assume the system authentication as + * needed + */ + private final boolean allowSystemAuthentication; + + /** Configuration for search authorization */ + private final SearchAuthorizationConfiguration searchAuthorizationConfiguration; + + @Override + public Optional getCacheKeyComponent() { + return Optional.of(searchAuthorizationConfiguration.hashCode()); + } +} diff --git a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/SearchContext.java b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/SearchContext.java new file mode 100644 index 0000000000000..d4e3712309d6c --- /dev/null +++ b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/SearchContext.java @@ -0,0 +1,86 @@ +package io.datahubproject.metadata.context; + +import com.linkedin.metadata.query.SearchFlags; +import 
com.linkedin.metadata.utils.elasticsearch.IndexConvention; +import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; +import java.util.Optional; +import java.util.function.Function; +import java.util.stream.Stream; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.Builder; +import lombok.Getter; + +@Builder(toBuilder = true) +@Getter +public class SearchContext implements ContextInterface { + + public static SearchContext EMPTY = + SearchContext.builder().indexConvention(IndexConventionImpl.NO_PREFIX).build(); + + public static SearchContext withFlagDefaults( + @Nonnull SearchContext searchContext, + @Nonnull Function flagDefaults) { + return searchContext.toBuilder() + // update search flags + .searchFlags(flagDefaults.apply(searchContext.getSearchFlags())) + .build(); + } + + @Nonnull private final IndexConvention indexConvention; + @Nonnull private final SearchFlags searchFlags; + + public boolean isRestrictedSearch() { + return Optional.ofNullable(searchFlags.isIncludeRestricted()).orElse(false); + } + + public SearchContext withFlagDefaults(Function flagDefaults) { + return SearchContext.withFlagDefaults(this, flagDefaults); + } + + /** + * Currently relying on the consistent hashing of String + * + * @return + */ + @Override + public Optional getCacheKeyComponent() { + return Optional.of( + Stream.of(indexConvention.getPrefix().orElse(""), keySearchFlags().toString()) + .mapToInt(String::hashCode) + .sum()); + } + + /** + * Only certain flags change the cache key + * + * @return + */ + private SearchFlags keySearchFlags() { + try { + // whether cache is enabled or not does not impact the result + return searchFlags.clone().setSkipCache(false); + } catch (CloneNotSupportedException e) { + throw new RuntimeException(e); + } + } + + public static class SearchContextBuilder { + + public SearchContextBuilder searchFlags(@Nullable SearchFlags searchFlags) { + this.searchFlags = searchFlags != null ? 
searchFlags : buildDefaultSearchFlags(); + return this; + } + + public SearchContext build() { + if (this.searchFlags == null) { + searchFlags(buildDefaultSearchFlags()); + } + return new SearchContext(this.indexConvention, this.searchFlags); + } + } + + private static SearchFlags buildDefaultSearchFlags() { + return new SearchFlags().setSkipCache(false); + } +} diff --git a/metadata-operation-context/src/main/java/io/datahubproject/test/metadata/context/TestOperationContexts.java b/metadata-operation-context/src/main/java/io/datahubproject/test/metadata/context/TestOperationContexts.java new file mode 100644 index 0000000000000..88e2f7f04ca5a --- /dev/null +++ b/metadata-operation-context/src/main/java/io/datahubproject/test/metadata/context/TestOperationContexts.java @@ -0,0 +1,69 @@ +package io.datahubproject.test.metadata.context; + +import com.datahub.authentication.Actor; +import com.datahub.authentication.ActorType; +import com.datahub.authentication.Authentication; +import com.datahub.authorization.config.SearchAuthorizationConfiguration; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.utils.elasticsearch.IndexConvention; +import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.context.OperationContextConfig; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +/** + * Useful for testing. If the defaults are not sufficient, try using the .toBuilder() and replacing + * the parts that you are interested in customizing. 
+ */ +public class TestOperationContexts { + public static final Authentication TEST_SYSTEM_AUTH = + new Authentication(new Actor(ActorType.USER, "testSystemUser"), ""); + public static final Authentication TEST_USER_AUTH = + new Authentication(new Actor(ActorType.USER, "datahub"), ""); + public static final IndexConvention TEST_EMPTY_INDEX_CONVENTION = IndexConventionImpl.NO_PREFIX; + + public static OperationContext systemContextNoSearchAuthorization( + @Nonnull EntityRegistry entityRegistry) { + return systemContextNoSearchAuthorization(entityRegistry, null); + } + + public static OperationContext systemContextNoSearchAuthorization( + @Nonnull EntityRegistry entityRegistry, @Nullable IndexConvention indexConvention) { + return OperationContext.asSystem( + OperationContextConfig.builder() + .searchAuthorizationConfiguration( + SearchAuthorizationConfiguration.builder().enabled(false).build()) + .build(), + entityRegistry, + TEST_SYSTEM_AUTH, + indexConvention != null ? indexConvention : TEST_EMPTY_INDEX_CONVENTION); + } + + public static OperationContext userContextNoSearchAuthorization( + @Nonnull EntityRegistry entityRegistry, @Nonnull Urn userUrn) { + return userContextNoSearchAuthorization(entityRegistry, Authorizer.EMPTY, userUrn); + } + + public static OperationContext userContextNoSearchAuthorization( + @Nonnull EntityRegistry entityRegistry, + @Nonnull Authorizer authorizer, + @Nonnull Urn userUrn) { + return userContextNoSearchAuthorization( + entityRegistry, + authorizer, + new Authentication(new Actor(ActorType.USER, userUrn.getId()), "")); + } + + public static OperationContext userContextNoSearchAuthorization( + @Nonnull EntityRegistry entityRegistry, + @Nonnull Authorizer authorizer, + @Nonnull Authentication sessionAuthorization) { + return systemContextNoSearchAuthorization(entityRegistry) + .asSession(authorizer, sessionAuthorization); + } + + private TestOperationContexts() {} +} diff --git 
a/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/ActorContextTest.java b/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/ActorContextTest.java new file mode 100644 index 0000000000000..15fe2bc277b9b --- /dev/null +++ b/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/ActorContextTest.java @@ -0,0 +1,129 @@ +package io.datahubproject.metadata.context; + +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotEquals; + +import com.datahub.authentication.Actor; +import com.datahub.authentication.ActorType; +import com.datahub.authentication.Authentication; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.StringArray; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.policy.DataHubActorFilter; +import com.linkedin.policy.DataHubPolicyInfo; +import com.linkedin.policy.DataHubResourceFilter; +import com.linkedin.policy.PolicyMatchCondition; +import com.linkedin.policy.PolicyMatchCriterion; +import com.linkedin.policy.PolicyMatchCriterionArray; +import com.linkedin.policy.PolicyMatchFilter; +import java.util.List; +import java.util.Set; +import org.testng.annotations.Test; + +public class ActorContextTest { + + private static final DataHubPolicyInfo POLICY_ABC = + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setActors( + new DataHubActorFilter() + .setUsers( + new UrnArray( + UrnUtils.getUrn("urn:li:corpUser:userA"), + UrnUtils.getUrn("urn:li:corpUser:userB")))) + .setPrivileges(new StringArray(List.of("a", "b", "c"))); + + private static final DataHubPolicyInfo POLICY_D = + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setActors( + new DataHubActorFilter() + .setUsers( + new UrnArray( + UrnUtils.getUrn("urn:li:corpUser:userA"), + UrnUtils.getUrn("urn:li:corpUser:userB")))) + .setPrivileges(new 
StringArray(List.of("d"))); + + private static final DataHubPolicyInfo POLICY_ABC_RESOURCE = + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setActors( + new DataHubActorFilter() + .setUsers( + new UrnArray( + UrnUtils.getUrn("urn:li:corpUser:userA"), + UrnUtils.getUrn("urn:li:corpUser:userB")))) + .setResources( + new DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria( + new PolicyMatchCriterionArray( + List.of( + new PolicyMatchCriterion() + .setField("tag") + .setCondition(PolicyMatchCondition.EQUALS) + .setValues(new StringArray("urn:li:tag:test"))))))) + .setPrivileges(new StringArray(List.of("a", "b", "c"))); + + private static final DataHubPolicyInfo POLICY_D_OWNER = + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setActors(new DataHubActorFilter().setResourceOwners(true)) + .setPrivileges(new StringArray(List.of("d"))); + + private static final DataHubPolicyInfo POLICY_D_OWNER_TYPE = + new DataHubPolicyInfo() + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setActors( + new DataHubActorFilter() + .setResourceOwnersTypes( + new UrnArray(UrnUtils.getUrn("urn:li:ownershipType:test")))) + .setPrivileges(new StringArray(List.of("d"))); + + @Test + public void actorContextId() { + Authentication userAuth = new Authentication(new Actor(ActorType.USER, "USER"), ""); + + assertEquals( + ActorContext.asSessionRestricted(userAuth, Set.of(), Set.of()).getCacheKeyComponent(), + ActorContext.asSessionRestricted(userAuth, Set.of(), Set.of()).getCacheKeyComponent(), + "Expected equality across instances"); + + assertEquals( + ActorContext.asSessionRestricted(userAuth, Set.of(), Set.of()).getCacheKeyComponent(), + ActorContext.asSessionRestricted( + userAuth, Set.of(), Set.of(UrnUtils.getUrn("urn:li:corpGroup:group1"))) + .getCacheKeyComponent(), + "Expected no impact to cache context from group membership"); + + assertEquals( + ActorContext.asSessionRestricted(userAuth, 
Set.of(POLICY_ABC, POLICY_D), Set.of()) + .getCacheKeyComponent(), + ActorContext.asSessionRestricted(userAuth, Set.of(POLICY_ABC, POLICY_D), Set.of()) + .getCacheKeyComponent(), + "Expected equality when non-ownership policies are identical"); + + assertNotEquals( + ActorContext.asSessionRestricted(userAuth, Set.of(POLICY_ABC_RESOURCE, POLICY_D), Set.of()) + .getCacheKeyComponent(), + ActorContext.asSessionRestricted(userAuth, Set.of(POLICY_ABC, POLICY_D), Set.of()) + .getCacheKeyComponent(), + "Expected differences with non-identical resource policy"); + + assertNotEquals( + ActorContext.asSessionRestricted(userAuth, Set.of(POLICY_D_OWNER), Set.of()) + .getCacheKeyComponent(), + ActorContext.asSessionRestricted(userAuth, Set.of(POLICY_D), Set.of()) + .getCacheKeyComponent(), + "Expected differences with ownership policy"); + + assertNotEquals( + ActorContext.asSessionRestricted(userAuth, Set.of(POLICY_D_OWNER_TYPE), Set.of()) + .getCacheKeyComponent(), + ActorContext.asSessionRestricted(userAuth, Set.of(POLICY_D), Set.of()) + .getCacheKeyComponent(), + "Expected differences with ownership type policy"); + } +} diff --git a/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/OperationContextTest.java b/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/OperationContextTest.java new file mode 100644 index 0000000000000..81583deba0e6c --- /dev/null +++ b/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/OperationContextTest.java @@ -0,0 +1,67 @@ +package io.datahubproject.metadata.context; + +import static org.mockito.Mockito.mock; +import static org.testng.Assert.assertEquals; + +import com.datahub.authentication.Actor; +import com.datahub.authentication.ActorType; +import com.datahub.authentication.Authentication; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.linkedin.metadata.models.registry.EntityRegistry; +import 
com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; +import org.testng.annotations.Test; + +public class OperationContextTest { + + @Test + public void testSystemPrivilegeEscalation() { + Authentication systemAuth = new Authentication(new Actor(ActorType.USER, "SYSTEM"), ""); + Authentication userAuth = new Authentication(new Actor(ActorType.USER, "USER"), ""); + + // Allows system authentication + OperationContext systemOpContext = + OperationContext.asSystem( + OperationContextConfig.builder().build(), + mock(EntityRegistry.class), + systemAuth, + IndexConventionImpl.NO_PREFIX); + + OperationContext opContext = systemOpContext.asSession(Authorizer.EMPTY, userAuth); + + assertEquals( + opContext.getAuthentication(), systemAuth, "Expected system authentication when allowed"); + assertEquals( + opContext.getAuditStamp().getActor().getId(), + "USER", + "Audit stamp expected to match the user's identity"); + assertEquals(opContext.getSessionAuthentication(), userAuth); + assertEquals(opContext.getSessionActorContext().getAuthentication(), userAuth); + assertEquals(opContext.getActorContext().getAuthentication(), systemAuth); + assertEquals(opContext.getSystemActorContext().getAuthentication(), systemAuth); + assertEquals(opContext.getSystemAuthentication().get(), systemAuth); + + // Do not allow system auth + OperationContext opContextNoSystem = + systemOpContext.toBuilder() + .operationContextConfig( + systemOpContext.getOperationContextConfig().toBuilder() + .allowSystemAuthentication(false) + .build()) + .build(userAuth); + + assertEquals( + opContextNoSystem.getAuthentication(), + userAuth, + "Expect user authentication when system authentication is not allowed"); + assertEquals( + opContextNoSystem.getAuditStamp().getActor().getId(), + "USER", + "Audit stamp expected to match the user's identity"); + assertEquals(opContextNoSystem.getSessionActorContext().getAuthentication(), userAuth); + 
assertEquals(opContextNoSystem.getActorContext().getAuthentication(), userAuth); + assertEquals(opContextNoSystem.getSystemActorContext().getAuthentication(), systemAuth); + assertEquals(opContextNoSystem.getSystemAuthentication().get(), systemAuth); + assertEquals(opContextNoSystem.getSystemActorContext().getAuthentication(), systemAuth); + assertEquals(opContextNoSystem.getSessionAuthentication(), userAuth); + } +} diff --git a/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/SearchContextTest.java b/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/SearchContextTest.java new file mode 100644 index 0000000000000..26365c283fc57 --- /dev/null +++ b/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/SearchContextTest.java @@ -0,0 +1,79 @@ +package io.datahubproject.metadata.context; + +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotEquals; + +import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; +import org.testng.annotations.Test; + +public class SearchContextTest { + + @Test + public void searchContextId() { + SearchContext testNoFlags = + SearchContext.builder().indexConvention(IndexConventionImpl.NO_PREFIX).build(); + + assertEquals( + testNoFlags.getCacheKeyComponent(), + SearchContext.builder() + .indexConvention(IndexConventionImpl.NO_PREFIX) + .build() + .getCacheKeyComponent(), + "Expected consistent context ids across instances"); + + SearchContext testWithFlags = + SearchContext.builder() + .indexConvention(IndexConventionImpl.NO_PREFIX) + .searchFlags(new SearchFlags().setFulltext(true)) + .build(); + + assertEquals( + testWithFlags.getCacheKeyComponent(), + SearchContext.builder() + .indexConvention(IndexConventionImpl.NO_PREFIX) + .searchFlags(new SearchFlags().setFulltext(true)) + .build() + .getCacheKeyComponent(), + "Expected consistent context ids across instances"); 
+ + assertNotEquals( + testNoFlags.getCacheKeyComponent(), + testWithFlags.getCacheKeyComponent(), + "Expected differences in search flags to result in different caches"); + assertNotEquals( + testWithFlags.getCacheKeyComponent(), + SearchContext.builder() + .indexConvention(IndexConventionImpl.NO_PREFIX) + .searchFlags(new SearchFlags().setFulltext(true).setIncludeRestricted(true)) + .build() + .getCacheKeyComponent(), + "Expected differences in search flags to result in different caches"); + + assertNotEquals( + testNoFlags.getCacheKeyComponent(), + SearchContext.builder() + .indexConvention(new IndexConventionImpl("Some Prefix")) + .searchFlags(null) + .build() + .getCacheKeyComponent(), + "Expected differences in index convention to result in different caches"); + + assertNotEquals( + SearchContext.builder() + .indexConvention(IndexConventionImpl.NO_PREFIX) + .searchFlags( + new SearchFlags() + .setFulltext(false) + .setIncludeRestricted(true) + .setSkipAggregates(true)) + .build() + .getCacheKeyComponent(), + SearchContext.builder() + .indexConvention(IndexConventionImpl.NO_PREFIX) + .searchFlags(new SearchFlags().setFulltext(true).setIncludeRestricted(true)) + .build() + .getCacheKeyComponent(), + "Expected differences in search flags to result in different caches"); + } +} diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java index 5ed69d3e2ff8c..aefa4d17b42c9 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java @@ -1,5 +1,6 @@ package com.datahub.authorization; +import com.datahub.authorization.config.SearchAuthorizationConfiguration; import com.datahub.plugins.auth.authorization.Authorizer; import java.util.List; import 
lombok.Data; @@ -12,4 +13,6 @@ public class AuthorizationConfiguration { /** List of configurations for {@link Authorizer}s to be registered */ private List authorizers; + + private SearchAuthorizationConfiguration search; } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java index 73add48958f60..731cf08185384 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java @@ -20,6 +20,7 @@ import com.linkedin.metadata.secret.SecretService; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; +import io.datahubproject.metadata.context.OperationContext; import java.net.URISyntaxException; import java.util.Collections; import javax.annotation.Nonnull; @@ -58,34 +59,31 @@ public Urn getInviteTokenRole( @Nonnull public String getInviteToken( - @Nullable final String roleUrnStr, - boolean regenerate, - @Nonnull final Authentication authentication) + @Nonnull OperationContext opContext, @Nullable final String roleUrnStr, boolean regenerate) throws Exception { final Filter inviteTokenFilter = roleUrnStr == null ? 
createInviteTokenFilter() : createInviteTokenFilter(roleUrnStr); final SearchResult searchResult = - _entityClient.filter( - INVITE_TOKEN_ENTITY_NAME, inviteTokenFilter, null, 0, 10, authentication); + _entityClient.filter(opContext, INVITE_TOKEN_ENTITY_NAME, inviteTokenFilter, null, 0, 10); final int numEntities = searchResult.getEntities().size(); // If there is more than one invite token, wipe all of them and generate a fresh one if (numEntities > 1) { - deleteExistingInviteTokens(searchResult, authentication); - return createInviteToken(roleUrnStr, authentication); + deleteExistingInviteTokens(searchResult, opContext.getAuthentication()); + return createInviteToken(roleUrnStr, opContext.getAuthentication()); } // If we want to regenerate, or there are no entities in the result, create a new invite token. if (regenerate || numEntities == 0) { - return createInviteToken(roleUrnStr, authentication); + return createInviteToken(roleUrnStr, opContext.getAuthentication()); } final SearchEntity searchEntity = searchResult.getEntities().get(0); final Urn inviteTokenUrn = searchEntity.getEntity(); com.linkedin.identity.InviteToken inviteToken = - getInviteTokenEntity(inviteTokenUrn, authentication); + getInviteTokenEntity(inviteTokenUrn, opContext.getAuthentication()); return _secretService.decrypt(inviteToken.getToken()); } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java index 9e8c1928c9de0..5663ffffdb3d6 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java @@ -2,7 +2,9 @@ import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.common.urn.Urn; +import com.linkedin.policy.DataHubPolicyInfo; import java.util.ArrayList; +import java.util.Collection; import 
java.util.Collections; import java.util.HashSet; import java.util.List; @@ -10,6 +12,7 @@ import java.util.Objects; import java.util.Optional; import java.util.Set; +import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @@ -154,4 +157,26 @@ private AuthorizedActors mergeAuthorizedActors( public DataHubAuthorizer getDefaultAuthorizer() { return (DataHubAuthorizer) defaultAuthorizer; } + + @Override + public Set getActorPolicies(@Nonnull Urn actorUrn) { + return authorizers.stream() + .flatMap(authorizer -> authorizer.getActorPolicies(actorUrn).stream()) + .collect(Collectors.toSet()); + } + + @Override + public Collection getActorGroups(@Nonnull Urn actorUrn) { + return authorizers.stream() + .flatMap(authorizer -> authorizer.getActorGroups(actorUrn).stream()) + .collect(Collectors.toList()); + } + + @Override + public Collection getActorPeers(@Nonnull Urn actorUrn) { + return authorizers.stream() + .flatMap(authorizer -> authorizer.getActorPeers(actorUrn).stream()) + .distinct() + .collect(Collectors.toList()); + } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java index 350d57aae3783..b5c6910776e52 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java @@ -8,19 +8,23 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.policy.DataHubPolicyInfo; +import io.datahubproject.metadata.context.OperationContext; import java.net.URISyntaxException; import java.util.ArrayList; +import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; 
+import java.util.Set; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.stream.Collectors; import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -46,9 +50,6 @@ public enum AuthorizationMode { ALLOW_ALL } - // Credentials used to make / authorize requests as the internal system actor. - private final Authentication _systemAuthentication; - // Maps privilege name to the associated set of policies for fast access. // Not concurrent data structure because writes are always against the entire thing. private final Map> _policyCache = @@ -62,22 +63,24 @@ public enum AuthorizationMode { private final PolicyEngine _policyEngine; private EntitySpecResolver _entitySpecResolver; private AuthorizationMode _mode; + private final OperationContext systemOpContext; public static final String ALL = "ALL"; public DataHubAuthorizer( - final Authentication systemAuthentication, + @Nonnull final OperationContext systemOpContext, final EntityClient entityClient, final int delayIntervalSeconds, final int refreshIntervalSeconds, final AuthorizationMode mode, final int policyFetchSize) { - _systemAuthentication = Objects.requireNonNull(systemAuthentication); + this.systemOpContext = systemOpContext; _mode = Objects.requireNonNull(mode); - _policyEngine = new PolicyEngine(systemAuthentication, Objects.requireNonNull(entityClient)); + _policyEngine = + new PolicyEngine(systemOpContext.getAuthentication(), Objects.requireNonNull(entityClient)); _policyRefreshRunnable = new PolicyRefreshRunnable( - systemAuthentication, + systemOpContext, new PolicyFetcher(entityClient), _policyCache, readWriteLock.writeLock(), @@ -95,7 +98,7 @@ public void init(@Nonnull Map authorizerConfig, @Nonnull Authori 
public AuthorizationResult authorize(@Nonnull final AuthorizationRequest request) { // 0. Short circuit: If the action is being performed by the system (root), always allow it. - if (isSystemRequest(request, this._systemAuthentication)) { + if (isSystemRequest(request, systemOpContext.getAuthentication())) { return new AuthorizationResult(request, AuthorizationResult.Type.ALLOW, null); } @@ -135,6 +138,45 @@ public List getGrantedPrivileges( policiesToEvaluate, resolvedActorSpec, resolvedResourceSpec); } + @Override + public Set getActorPolicies(@Nonnull Urn actorUrn) { + // 1. Fetch all policies + final List policiesToEvaluate = getOrDefault(ALL, new ArrayList<>()); + + // 2. Actor identity + final ResolvedEntitySpec resolvedActorSpec = + _entitySpecResolver.resolve(new EntitySpec(actorUrn.getEntityType(), actorUrn.toString())); + + return policiesToEvaluate.stream() + .filter(policy -> PoliciesConfig.ACTIVE_POLICY_STATE.equals(policy.getState())) + .filter( + policy -> + policy.getActors().isResourceOwners() + || _policyEngine.isActorMatch( + resolvedActorSpec, + policy.getActors(), + Optional.empty(), + new PolicyEngine.PolicyEvaluationContext())) + .collect(Collectors.toSet()); + } + + @Override + public Collection getActorGroups(@Nonnull Urn actorUrn) { + // 1. 
Actor identity + final ResolvedEntitySpec resolvedActorSpec = + _entitySpecResolver.resolve(new EntitySpec(actorUrn.getEntityType(), actorUrn.toString())); + + return resolvedActorSpec.getGroupMembership().stream() + .map(UrnUtils::getUrn) + .collect(Collectors.toList()); + } + + @Override + public Collection getActorPeers(@Nonnull Urn actorUrn) { + // TODO: Fetch users from groups the actor is a member of + return List.of(actorUrn); + } + /** * Retrieves the current list of actors authorized to for a particular privilege against an * optional resource @@ -261,7 +303,7 @@ private List getOrDefault(String key, List @RequiredArgsConstructor static class PolicyRefreshRunnable implements Runnable { - private final Authentication _systemAuthentication; + private final OperationContext systemOpContext; private final PolicyFetcher _policyFetcher; private final Map> _policyCache; private final Lock writeLock; @@ -278,7 +320,7 @@ public void run() { while (total == null || scrollId != null) { try { final PolicyFetcher.PolicyFetchResult policyFetchResult = - _policyFetcher.fetchPolicies(count, scrollId, null, _systemAuthentication); + _policyFetcher.fetchPolicies(systemOpContext, count, scrollId, null); addPoliciesToCache(newCache, policyFetchResult.getPolicies()); diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java index f078d2d316cae..13c50059031b6 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java @@ -256,11 +256,10 @@ private boolean checkCondition( } /** - * Returns true if the actor portion of a DataHub policy matches a the actor being evaluated, - * false otherwise. Returns true if the actor portion of a DataHub policy matches a the actor - * being evaluated, false otherwise. 
+ * Returns true if the actor portion of a DataHub policy matches the actor being evaluated, false + * otherwise. */ - private boolean isActorMatch( + boolean isActorMatch( final ResolvedEntitySpec resolvedActorSpec, final DataHubActorFilter actorFilter, final Optional resourceSpec, diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java index 0485e3000ad17..cbd80c7755adb 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java @@ -3,12 +3,10 @@ import static com.linkedin.metadata.Constants.DATAHUB_POLICY_INFO_ASPECT_NAME; import static com.linkedin.metadata.Constants.POLICY_ENTITY_NAME; -import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; @@ -16,6 +14,7 @@ import com.linkedin.metadata.search.SearchEntity; import com.linkedin.policy.DataHubPolicyInfo; import com.linkedin.r2.RemoteInvocationException; +import io.datahubproject.metadata.context.OperationContext; import java.net.URISyntaxException; import java.util.Collections; import java.util.HashSet; @@ -33,7 +32,7 @@ @Slf4j @RequiredArgsConstructor public class PolicyFetcher { - private final EntityClient _entityClient; + private final EntityClient entityClient; private static final SortCriterion POLICY_SORT_CRITERION = new SortCriterion().setField("lastUpdatedTimestamp").setOrder(SortOrder.DESCENDING); @@ -46,7 +45,7 @@ public class PolicyFetcher { */ @Deprecated public 
CompletableFuture fetchPolicies( - int start, String query, int count, Filter filter, Authentication authentication) { + OperationContext opContext, int start, String query, int count, Filter filter) { return CompletableFuture.supplyAsync( () -> { try { @@ -57,7 +56,7 @@ public CompletableFuture fetchPolicies( while (PolicyFetchResult.EMPTY.equals(result) && scrollId != null) { PolicyFetchResult tmpResult = fetchPolicies( - query, count, scrollId.isEmpty() ? null : scrollId, filter, authentication); + opContext, query, count, scrollId.isEmpty() ? null : scrollId, filter); fetchedResults += tmpResult.getPolicies().size(); scrollId = tmpResult.getScrollId(); if (fetchedResults > start) { @@ -73,35 +72,32 @@ public CompletableFuture fetchPolicies( } public PolicyFetchResult fetchPolicies( - int count, @Nullable String scrollId, Filter filter, Authentication authentication) + OperationContext opContext, int count, @Nullable String scrollId, Filter filter) throws RemoteInvocationException, URISyntaxException { - return fetchPolicies("", count, scrollId, filter, authentication); + return fetchPolicies(opContext, "", count, scrollId, filter); } public PolicyFetchResult fetchPolicies( - String query, - int count, - @Nullable String scrollId, - Filter filter, - Authentication authentication) + OperationContext opContext, String query, int count, @Nullable String scrollId, Filter filter) throws RemoteInvocationException, URISyntaxException { log.debug(String.format("Batch fetching policies. 
count: %s, scroll: %s", count, scrollId)); // First fetch all policy urns ScrollResult result = - _entityClient.scrollAcrossEntities( + entityClient.scrollAcrossEntities( + opContext.withSearchFlags( + flags -> + flags + .setSkipCache(true) + .setSkipAggregates(true) + .setSkipHighlighting(true) + .setFulltext(true)), List.of(POLICY_ENTITY_NAME), query, filter, scrollId, null, - count, - new SearchFlags() - .setSkipCache(true) - .setSkipAggregates(true) - .setSkipHighlighting(true) - .setFulltext(true), - authentication); + count); List policyUrns = result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()); @@ -111,8 +107,8 @@ public PolicyFetchResult fetchPolicies( // Fetch DataHubPolicyInfo aspects for each urn final Map policyEntities = - _entityClient.batchGetV2( - POLICY_ENTITY_NAME, new HashSet<>(policyUrns), null, authentication); + entityClient.batchGetV2( + POLICY_ENTITY_NAME, new HashSet<>(policyUrns), null, opContext.getAuthentication()); return new PolicyFetchResult( policyUrns.stream() .map(policyEntities::get) diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java index cd9d5972103c1..775039766a2c9 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java @@ -18,6 +18,7 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.secret.SecretService; +import io.datahubproject.metadata.context.OperationContext; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; @@ -35,6 +36,7 @@ public class InviteTokenServiceTest { private EntityClient _entityClient; private SecretService _secretService; 
private InviteTokenService _inviteTokenService; + private OperationContext opContext; @BeforeMethod public void setupTest() throws Exception { @@ -42,7 +44,7 @@ public void setupTest() throws Exception { roleUrn = Urn.createFromString(ROLE_URN_STRING); _entityClient = mock(EntityClient.class); _secretService = mock(SecretService.class); - + opContext = mock(OperationContext.class); _inviteTokenService = new InviteTokenService(_entityClient, _secretService); } @@ -129,27 +131,22 @@ public void testGetInviteTokenRole() throws Exception { public void getInviteTokenRoleUrnDoesNotExist() throws Exception { when(_entityClient.exists(eq(roleUrn), eq(SYSTEM_AUTHENTICATION))).thenReturn(false); - assertThrows( - () -> _inviteTokenService.getInviteToken(roleUrn.toString(), false, SYSTEM_AUTHENTICATION)); + assertThrows(() -> _inviteTokenService.getInviteToken(opContext, roleUrn.toString(), false)); } @Test public void getInviteTokenRegenerate() throws Exception { final SearchResult searchResult = new SearchResult(); searchResult.setEntities(new SearchEntityArray()); + when(opContext.getAuthentication()).thenReturn(SYSTEM_AUTHENTICATION); when(_entityClient.filter( - eq(INVITE_TOKEN_ENTITY_NAME), - any(), - any(), - anyInt(), - anyInt(), - eq(SYSTEM_AUTHENTICATION))) + eq(opContext), eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt())) .thenReturn(searchResult); when(_secretService.generateUrlSafeToken(anyInt())).thenReturn(INVITE_TOKEN_STRING); when(_secretService.hashString(anyString())).thenReturn(HASHED_INVITE_TOKEN_STRING); when(_secretService.encrypt(anyString())).thenReturn(ENCRYPTED_INVITE_TOKEN_STRING); - _inviteTokenService.getInviteToken(null, true, SYSTEM_AUTHENTICATION); + _inviteTokenService.getInviteToken(opContext, null, true); verify(_entityClient, times(1)).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION)); } @@ -157,19 +154,15 @@ public void getInviteTokenRegenerate() throws Exception { public void getInviteTokenEmptySearchResult() throws 
Exception { final SearchResult searchResult = new SearchResult(); searchResult.setEntities(new SearchEntityArray()); + when(opContext.getAuthentication()).thenReturn(SYSTEM_AUTHENTICATION); when(_entityClient.filter( - eq(INVITE_TOKEN_ENTITY_NAME), - any(), - any(), - anyInt(), - anyInt(), - eq(SYSTEM_AUTHENTICATION))) + eq(opContext), eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt())) .thenReturn(searchResult); when(_secretService.generateUrlSafeToken(anyInt())).thenReturn(INVITE_TOKEN_STRING); when(_secretService.hashString(anyString())).thenReturn(HASHED_INVITE_TOKEN_STRING); when(_secretService.encrypt(anyString())).thenReturn(ENCRYPTED_INVITE_TOKEN_STRING); - _inviteTokenService.getInviteToken(null, false, SYSTEM_AUTHENTICATION); + _inviteTokenService.getInviteToken(opContext, null, false); verify(_entityClient, times(1)).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION)); } @@ -180,19 +173,15 @@ public void getInviteTokenNullEntity() throws Exception { final SearchEntity searchEntity = new SearchEntity().setEntity(inviteTokenUrn); searchEntityArray.add(searchEntity); searchResult.setEntities(searchEntityArray); + when(opContext.getAuthentication()).thenReturn(SYSTEM_AUTHENTICATION); when(_entityClient.filter( - eq(INVITE_TOKEN_ENTITY_NAME), - any(), - any(), - anyInt(), - anyInt(), - eq(SYSTEM_AUTHENTICATION))) + eq(opContext), eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt())) .thenReturn(searchResult); when(_entityClient.getV2( eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION))) .thenReturn(null); - assertThrows(() -> _inviteTokenService.getInviteToken(null, false, SYSTEM_AUTHENTICATION)); + assertThrows(() -> _inviteTokenService.getInviteToken(opContext, null, false)); } @Test @@ -203,12 +192,7 @@ public void getInviteTokenNoInviteTokenAspect() throws Exception { searchEntityArray.add(searchEntity); searchResult.setEntities(searchEntityArray); when(_entityClient.filter( - 
eq(INVITE_TOKEN_ENTITY_NAME), - any(), - any(), - anyInt(), - anyInt(), - eq(SYSTEM_AUTHENTICATION))) + eq(opContext), eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt())) .thenReturn(searchResult); final EntityResponse entityResponse = new EntityResponse().setAspects(new EnvelopedAspectMap()); @@ -218,7 +202,7 @@ public void getInviteTokenNoInviteTokenAspect() throws Exception { when(_secretService.encrypt(anyString())).thenReturn(ENCRYPTED_INVITE_TOKEN_STRING); - assertThrows(() -> _inviteTokenService.getInviteToken(null, false, SYSTEM_AUTHENTICATION)); + assertThrows(() -> _inviteTokenService.getInviteToken(opContext, null, false)); } @Test @@ -228,13 +212,9 @@ public void getInviteToken() throws Exception { final SearchEntity searchEntity = new SearchEntity().setEntity(inviteTokenUrn); searchEntityArray.add(searchEntity); searchResult.setEntities(searchEntityArray); + when(opContext.getAuthentication()).thenReturn(SYSTEM_AUTHENTICATION); when(_entityClient.filter( - eq(INVITE_TOKEN_ENTITY_NAME), - any(), - any(), - anyInt(), - anyInt(), - eq(SYSTEM_AUTHENTICATION))) + eq(opContext), eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt())) .thenReturn(searchResult); final EntityResponse entityResponse = new EntityResponse(); @@ -251,8 +231,6 @@ public void getInviteToken() throws Exception { when(_secretService.decrypt(eq(ENCRYPTED_INVITE_TOKEN_STRING))).thenReturn(INVITE_TOKEN_STRING); - assertEquals( - _inviteTokenService.getInviteToken(null, false, SYSTEM_AUTHENTICATION), - INVITE_TOKEN_STRING); + assertEquals(_inviteTokenService.getInviteToken(opContext, null, false), INVITE_TOKEN_STRING); } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java index 588cdf57269ef..c37dc70ef0649 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java +++ 
b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java @@ -39,14 +39,17 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.identity.GroupMembership; import com.linkedin.identity.RoleMembership; -import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.ScrollResult; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.policy.DataHubActorFilter; import com.linkedin.policy.DataHubPolicyInfo; import com.linkedin.policy.DataHubResourceFilter; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.context.OperationContextConfig; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -69,6 +72,7 @@ public class DataHubAuthorizerTest { private EntityClient _entityClient; private DataHubAuthorizer _dataHubAuthorizer; + private OperationContext systemOpContext; @BeforeMethod public void setupTest() throws Exception { @@ -158,19 +162,13 @@ public void setupTest() throws Exception { ImmutableList.of(new SearchEntity().setEntity(adminPolicyUrn)))); when(_entityClient.scrollAcrossEntities( + any(OperationContext.class), eq(List.of("dataHubPolicy")), eq(""), isNull(), any(), isNull(), - anyInt(), - eq( - new SearchFlags() - .setFulltext(true) - .setSkipAggregates(true) - .setSkipHighlighting(true) - .setSkipCache(true)), - any())) + anyInt())) .thenReturn(policySearchResult1) .thenReturn(policySearchResult2) .thenReturn(policySearchResult3) @@ -268,10 +266,16 @@ public void setupTest() throws Exception { final Authentication systemAuthentication = new Authentication(new Actor(ActorType.USER, DATAHUB_SYSTEM_CLIENT_ID), ""); + systemOpContext = + OperationContext.asSystem( + 
OperationContextConfig.builder().build(), + mock(EntityRegistry.class), + systemAuthentication, + mock(IndexConvention.class)); _dataHubAuthorizer = new DataHubAuthorizer( - systemAuthentication, + systemOpContext, _entityClient, 10, 10, @@ -358,14 +362,13 @@ public void testInvalidateCache() throws Exception { emptyResult.setEntities(new SearchEntityArray()); when(_entityClient.search( + any(OperationContext.class), eq("dataHubPolicy"), eq(""), isNull(), any(), anyInt(), - anyInt(), - any(), - eq(new SearchFlags().setFulltext(true)))) + anyInt())) .thenReturn(emptyResult); when(_entityClient.batchGetV2( eq(POLICY_ENTITY_NAME), eq(Collections.emptySet()), eq(null), any())) diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index 494c18b75f023..9e82430378827 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -45,6 +45,11 @@ authorization: cachePolicyFetchSize: ${POLICY_CACHE_FETCH_SIZE:1000} # Enables authorization of reads, writes, and deletes on REST APIs. Defaults to false for backwards compatibility, but should become true down the road restApiAuthorization: ${REST_API_AUTHORIZATION_ENABLED:false} + search: + enabled: ${SEARCH_AUTHORIZATION_ENABLED:false} + recommendations: + # Currently limited to the actor only, see TODO: DataHubAuthorizer + peerGroupEnabled: ${SEARCH_AUTHORIZATION_RECOMMENDATIONS_PEER_GROUP_ENABLED:true} ingestion: # The value of cliMajorVersion is substituted in by the processResources Gradle task. 
@@ -327,6 +332,11 @@ systemUpdate: batchSize: ${BOOTSTRAP_SYSTEM_UPDATE_POLICY_FIELDS_BATCH_SIZE:5000} reprocess: enabled: ${REPROCESS_DEFAULT_POLICY_FIELDS:false} + ownershipTypes: + enabled: ${BOOTSTRAP_SYSTEM_UPDATE_OWNERSHIP_TYPES_ENABLED:true} + batchSize: ${BOOTSTRAP_SYSTEM_UPDATE_OWNERSHIP_TYPES_BATCH_SIZE:1000} + reprocess: + enabled: ${BOOTSTRAP_SYSTEM_UPDATE_OWNERSHIP_TYPES_REPROCESS:false} structuredProperties: enabled: ${ENABLE_STRUCTURED_PROPERTIES_HOOK:true} # applies structured properties mappings diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java index 0935e8ad0e7d4..d29b770cbf0ee 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java @@ -3,7 +3,9 @@ import com.datahub.authorization.DataHubAuthorizer; import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import io.datahubproject.metadata.context.OperationContext; import javax.annotation.Nonnull; +import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -26,7 +28,9 @@ public class DataHubAuthorizerFactory { @Bean(name = "dataHubAuthorizer") @Scope("singleton") @Nonnull - protected DataHubAuthorizer dataHubAuthorizer(final SystemEntityClient systemEntityClient) { + protected DataHubAuthorizer dataHubAuthorizer( + @Qualifier("systemOperationContext") final OperationContext systemOpContext, + final SystemEntityClient systemEntityClient) { final DataHubAuthorizer.AuthorizationMode mode = policiesEnabled @@ -34,7 +38,7 @@ protected 
DataHubAuthorizer dataHubAuthorizer(final SystemEntityClient systemEnt : DataHubAuthorizer.AuthorizationMode.ALLOW_ALL; return new DataHubAuthorizer( - systemEntityClient.getSystemAuthentication(), + systemOpContext, systemEntityClient, 10, policyCacheRefreshIntervalSeconds, diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RestrictedServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RestrictedServiceFactory.java new file mode 100644 index 0000000000000..de161023faed9 --- /dev/null +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RestrictedServiceFactory.java @@ -0,0 +1,28 @@ +package com.linkedin.gms.factory.auth; + +import com.linkedin.metadata.secret.SecretService; +import com.linkedin.metadata.service.RestrictedService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import javax.annotation.Nonnull; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.PropertySource; +import org.springframework.context.annotation.Scope; + +@Configuration +@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) +public class RestrictedServiceFactory { + + @Autowired + @Qualifier("dataHubSecretService") + private SecretService _secretService; + + @Bean(name = "restrictedService") + @Scope("singleton") + @Nonnull + protected RestrictedService getInstance() throws Exception { + return new RestrictedService(_secretService); + } +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/context/SystemOperationContextFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/context/SystemOperationContextFactory.java new file mode 100644 index 
0000000000000..0dfdee5fcbbbc --- /dev/null +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/context/SystemOperationContextFactory.java @@ -0,0 +1,44 @@ +package com.linkedin.gms.factory.context; + +import com.datahub.authentication.Authentication; +import com.linkedin.gms.factory.config.ConfigurationProvider; +import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; +import com.linkedin.metadata.models.registry.EntityRegistry; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.context.OperationContextConfig; +import javax.annotation.Nonnull; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +public class SystemOperationContextFactory { + + @Autowired + @Qualifier("baseElasticSearchComponents") + private BaseElasticSearchComponentsFactory.BaseElasticSearchComponents components; + + @Bean(name = "systemOperationContext") + @Nonnull + protected OperationContext systemOperationContext( + @Nonnull final EntityRegistry entityRegistry, + @Nonnull @Qualifier("systemAuthentication") final Authentication systemAuthentication, + @Nonnull final OperationContextConfig operationContextConfig) { + + return OperationContext.asSystem( + operationContextConfig, + entityRegistry, + systemAuthentication, + components.getIndexConvention()); + } + + @Bean + @Nonnull + protected OperationContextConfig operationContextConfig( + final ConfigurationProvider configurationProvider) { + return OperationContextConfig.builder() + .searchAuthorizationConfiguration(configurationProvider.getAuthorization().getSearch()) + .build(); + } +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/JavaEntityClientFactory.java 
b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/JavaEntityClientFactory.java index 530136e32662f..9c9c2b7a70a65 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/JavaEntityClientFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/JavaEntityClientFactory.java @@ -1,6 +1,5 @@ package com.linkedin.gms.factory.entityclient; -import com.datahub.authentication.Authentication; import com.linkedin.entity.client.EntityClient; import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.client.JavaEntityClient; @@ -16,6 +15,7 @@ import com.linkedin.metadata.service.RollbackService; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import io.datahubproject.metadata.context.OperationContext; import javax.inject.Singleton; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; @@ -32,6 +32,7 @@ public class JavaEntityClientFactory { @Bean("entityClient") @Singleton public EntityClient entityClient( + final OperationContext opContext, final @Qualifier("entityService") EntityService _entityService, final @Qualifier("deleteEntityService") DeleteEntityService _deleteEntityService, final @Qualifier("searchService") SearchService _searchService, @@ -43,6 +44,7 @@ public EntityClient entityClient( final @Qualifier("kafkaEventProducer") EventProducer _eventProducer, final RollbackService rollbackService) { return new JavaEntityClient( + opContext, _entityService, _deleteEntityService, _entitySearchService, @@ -57,6 +59,7 @@ public EntityClient entityClient( @Bean("systemEntityClient") @Singleton public SystemEntityClient systemEntityClient( + final @Qualifier("systemOperationContext") OperationContext systemOperationContext, final @Qualifier("entityService") EntityService 
_entityService, final @Qualifier("deleteEntityService") DeleteEntityService _deleteEntityService, final @Qualifier("searchService") SearchService _searchService, @@ -67,9 +70,9 @@ public SystemEntityClient systemEntityClient( final @Qualifier("relationshipSearchService") LineageSearchService _lineageSearchService, final @Qualifier("kafkaEventProducer") EventProducer _eventProducer, final RollbackService rollbackService, - final EntityClientCacheConfig entityClientCacheConfig, - @Qualifier("systemAuthentication") final Authentication systemAuthentication) { + final EntityClientCacheConfig entityClientCacheConfig) { return new SystemJavaEntityClient( + systemOperationContext, _entityService, _deleteEntityService, _entitySearchService, @@ -79,7 +82,6 @@ public SystemEntityClient systemEntityClient( _timeseriesAspectService, rollbackService, _eventProducer, - systemAuthentication, entityClientCacheConfig); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/RestliEntityClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/RestliEntityClientFactory.java index 88989b1833e78..f2c970df681e6 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/RestliEntityClientFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/RestliEntityClientFactory.java @@ -1,6 +1,5 @@ package com.linkedin.gms.factory.entityclient; -import com.datahub.authentication.Authentication; import com.linkedin.entity.client.EntityClient; import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.entity.client.SystemEntityClient; @@ -10,6 +9,7 @@ import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.parseq.retry.backoff.ExponentialBackoff; import com.linkedin.restli.client.Client; +import io.datahubproject.metadata.context.OperationContext; import java.net.URI; import javax.inject.Singleton; 
import org.springframework.beans.factory.annotation.Qualifier; @@ -48,6 +48,7 @@ public EntityClient entityClient( @Bean("systemEntityClient") @Singleton public SystemEntityClient systemEntityClient( + @Qualifier("systemOperationContext") final OperationContext systemOperationContext, @Value("${datahub.gms.host}") String gmsHost, @Value("${datahub.gms.port}") int gmsPort, @Value("${datahub.gms.useSSL}") boolean gmsUseSSL, @@ -55,8 +56,7 @@ public SystemEntityClient systemEntityClient( @Value("${datahub.gms.sslContext.protocol}") String gmsSslProtocol, @Value("${entityClient.retryInterval:2}") int retryInterval, @Value("${entityClient.numRetries:3}") int numRetries, - final EntityClientCacheConfig entityClientCacheConfig, - @Qualifier("systemAuthentication") final Authentication systemAuthentication) { + final EntityClientCacheConfig entityClientCacheConfig) { final Client restClient; if (gmsUri != null) { @@ -66,10 +66,10 @@ public SystemEntityClient systemEntityClient( DefaultRestliClientFactory.getRestLiClient(gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol); } return new SystemRestliEntityClient( + systemOperationContext, restClient, new ExponentialBackoff(retryInterval), numRetries, - systemAuthentication, entityClientCacheConfig); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/form/FormServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/form/FormServiceFactory.java index 73be819028f57..16631c52e9103 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/form/FormServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/form/FormServiceFactory.java @@ -3,7 +3,9 @@ import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import io.datahubproject.metadata.context.OperationContext; import javax.annotation.Nonnull; +import 
org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; @@ -15,7 +17,10 @@ public class FormServiceFactory { @Bean(name = "formService") @Scope("singleton") @Nonnull - protected FormService getInstance(final SystemEntityClient entityClient) throws Exception { - return new FormService(entityClient, entityClient.getSystemAuthentication()); + protected FormService getInstance( + @Qualifier("systemOperationContext") OperationContext systemOpContext, + final SystemEntityClient entityClient) + throws Exception { + return new FormService(systemOpContext, entityClient); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java index 15bf674581b6a..73fb026398d2d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java @@ -32,6 +32,7 @@ import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; +import com.linkedin.metadata.service.RestrictedService; import com.linkedin.metadata.service.SettingsService; import com.linkedin.metadata.service.ViewService; import com.linkedin.metadata.timeline.TimelineService; @@ -167,6 +168,10 @@ public class GraphQLEngineFactory { @Qualifier("formService") private FormService formService; + @Autowired + @Qualifier("restrictedService") + private RestrictedService restrictedService; + @Value("${platformAnalytics.enabled}") // TODO: Migrate to DATAHUB_ANALYTICS_ENABLED private Boolean isAnalyticsEnabled; @@ -213,6 +218,7 @@ protected GraphQLEngine 
graphQLEngine( args.setQueryService(queryService); args.setFeatureFlags(configProvider.getFeatureFlags()); args.setFormService(formService); + args.setRestrictedService(restrictedService); args.setDataProductService(dataProductService); args.setGraphQLQueryComplexityLimit( configProvider.getGraphQL().getQuery().getComplexityLimit()); diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java index 0ba953d66730c..10833109e6a59 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java @@ -5,6 +5,7 @@ import com.linkedin.gms.factory.auth.SystemAuthenticationFactory; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import io.datahubproject.metadata.context.OperationContext; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -33,9 +34,11 @@ public class IngestionSchedulerFactory { @Bean(name = "ingestionScheduler") @Scope("singleton") @Nonnull - protected IngestionScheduler getInstance(final SystemEntityClient entityClient) { + protected IngestionScheduler getInstance( + @Qualifier("systemOperationContext") final OperationContext systemOpContext, + final SystemEntityClient entityClient) { return new IngestionScheduler( - entityClient.getSystemAuthentication(), + systemOpContext, entityClient, _configProvider.getIngestion(), _delayIntervalSeconds, diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/DomainsCandidateSourceFactory.java 
b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/DomainsCandidateSourceFactory.java index a7c2dde8b7d25..1df73399185fd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/DomainsCandidateSourceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/DomainsCandidateSourceFactory.java @@ -4,6 +4,7 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.recommendation.candidatesource.DomainsCandidateSource; import com.linkedin.metadata.search.EntitySearchService; +import io.datahubproject.metadata.context.OperationContext; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -21,7 +22,8 @@ public class DomainsCandidateSourceFactory { @Bean(name = "domainsCandidateSource") @Nonnull - protected DomainsCandidateSource getInstance(final EntityRegistry entityRegistry) { + protected DomainsCandidateSource getInstance( + final OperationContext opContext, final EntityRegistry entityRegistry) { return new DomainsCandidateSource(entitySearchService, entityRegistry); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java index 03e066a912e44..d0fe095ddfd91 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java @@ -1,6 +1,5 @@ package com.linkedin.gms.factory.usage; -import com.datahub.authentication.Authentication; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.restli.DefaultRestliClientFactory; import 
com.linkedin.metadata.spring.YamlPropertySourceFactory; @@ -8,6 +7,7 @@ import com.linkedin.r2.transport.http.client.HttpClientFactory; import com.linkedin.restli.client.Client; import com.linkedin.usage.UsageClient; +import io.datahubproject.metadata.context.OperationContext; import java.util.HashMap; import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; @@ -48,7 +48,7 @@ public class UsageClientFactory { @Bean("usageClient") public UsageClient getUsageClient( - @Qualifier("systemAuthentication") final Authentication systemAuthentication) { + @Qualifier("systemOperationContext") final OperationContext systemOperationContext) { Map params = new HashMap<>(); params.put(HttpClientFactory.HTTP_REQUEST_TIMEOUT, String.valueOf(timeoutMs)); @@ -56,10 +56,10 @@ public UsageClient getUsageClient( DefaultRestliClientFactory.getRestLiClient( gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol, params); return new UsageClient( + systemOperationContext, restClient, new ExponentialBackoff(retryInterval), numRetries, - systemAuthentication, configurationProvider.getCache().getClient().getUsageClient()); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java index b808c3da5d8d0..4bf4313f4ad7e 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java @@ -33,6 +33,7 @@ import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.transformer.SearchDocumentTransformer; +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; @@ -103,10 +104,12 @@ 
public class BootstrapManagerFactory { @Bean(name = "bootstrapManager") @Scope("singleton") @Nonnull - protected BootstrapManager createInstance() { + protected BootstrapManager createInstance( + @Qualifier("systemOperationContext") final OperationContext systemOpContext) { final IngestRootUserStep ingestRootUserStep = new IngestRootUserStep(_entityService); final IngestPoliciesStep ingestPoliciesStep = new IngestPoliciesStep( + systemOpContext, _entityRegistry, _entityService, _entitySearchService, @@ -118,7 +121,8 @@ protected BootstrapManager createInstance() { final IngestDataPlatformInstancesStep ingestDataPlatformInstancesStep = new IngestDataPlatformInstancesStep(_entityService, _migrationsDao); final RestoreGlossaryIndices restoreGlossaryIndicesStep = - new RestoreGlossaryIndices(_entityService, _entitySearchService, _entityRegistry); + new RestoreGlossaryIndices( + systemOpContext, _entityService, _entitySearchService, _entityRegistry); final IndexDataPlatformsStep indexDataPlatformsStep = new IndexDataPlatformsStep(_entityService, _entitySearchService, _entityRegistry); final RestoreDbtSiblingsIndices restoreDbtSiblingsIndices = @@ -161,7 +165,8 @@ protected BootstrapManager createInstance() { } if (_backfillBrowsePathsV2Enabled) { - finalSteps.add(new BackfillBrowsePathsV2Step(_entityService, _searchService)); + finalSteps.add( + new BackfillBrowsePathsV2Step(systemOpContext, _entityService, _searchService)); } return new BootstrapManager(finalSteps); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java index 49a86406c1ecd..30cfc304926c1 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java @@ -24,6 +24,7 @@ 
import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; +import io.datahubproject.metadata.context.OperationContext; import java.util.Set; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; @@ -46,11 +47,14 @@ public class BackfillBrowsePathsV2Step extends UpgradeStep { private static final String UPGRADE_ID = "backfill-default-browse-paths-v2-step"; private static final Integer BATCH_SIZE = 5000; - private final SearchService _searchService; + private final SearchService searchService; + private final OperationContext opContext; - public BackfillBrowsePathsV2Step(EntityService entityService, SearchService searchService) { + public BackfillBrowsePathsV2Step( + OperationContext opContext, EntityService entityService, SearchService searchService) { super(entityService, VERSION, UPGRADE_ID); - _searchService = searchService; + this.searchService = searchService; + this.opContext = opContext; } @Nonnull @@ -106,8 +110,8 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S filter.setOr(conjunctiveCriterionArray); final ScrollResult scrollResult = - _searchService.scrollAcrossEntities( - ImmutableList.of(entityType), "*", filter, null, scrollId, "5m", BATCH_SIZE, null); + searchService.scrollAcrossEntities( + opContext, ImmutableList.of(entityType), "*", filter, null, scrollId, "5m", BATCH_SIZE); if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().size() == 0) { return null; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java index f925c96e333fd..3a6b704613a56 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java +++ 
b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java @@ -27,6 +27,7 @@ import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.policy.DataHubPolicyInfo; +import io.datahubproject.metadata.context.OperationContext; import java.io.IOException; import java.net.URISyntaxException; import java.util.Collections; @@ -45,6 +46,7 @@ public class IngestPoliciesStep implements BootstrapStep { private static final String POLICY_ENTITY_NAME = "dataHubPolicy"; private static final String POLICY_INFO_ASPECT_NAME = "dataHubPolicyInfo"; + private final OperationContext systemOpContext; private final EntityRegistry _entityRegistry; private final EntityService _entityService; private final EntitySearchService _entitySearchService; @@ -113,7 +115,7 @@ public void execute() throws IOException, URISyntaxException { // If search index for policies is empty, update the policy index with the ingested policies // from previous step. 
// Directly update the ES index, does not produce MCLs - if (_entitySearchService.docCount(Constants.POLICY_ENTITY_NAME) == 0) { + if (_entitySearchService.docCount(systemOpContext, Constants.POLICY_ENTITY_NAME) == 0) { updatePolicyIndex(); } log.info("Successfully ingested default access policies."); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java index 5c2b2c28e6dcf..e0b912f483520 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java @@ -12,10 +12,10 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; +import io.datahubproject.metadata.context.OperationContext; import java.util.Collections; import java.util.HashSet; import java.util.LinkedList; @@ -34,26 +34,29 @@ public class RestoreGlossaryIndices extends UpgradeStep { private static final String UPGRADE_ID = "restore-glossary-indices-ui"; private static final Integer BATCH_SIZE = 1000; - private final EntitySearchService _entitySearchService; - private final EntityRegistry _entityRegistry; + private final OperationContext opContext; + private final EntitySearchService entitySearchService; + private final EntityRegistry entityRegistry; public RestoreGlossaryIndices( + OperationContext opContext, EntityService entityService, EntitySearchService entitySearchService, EntityRegistry entityRegistry) { super(entityService, VERSION, UPGRADE_ID); - _entitySearchService = 
entitySearchService; - _entityRegistry = entityRegistry; + this.opContext = opContext; + this.entitySearchService = entitySearchService; + this.entityRegistry = entityRegistry; } @Override public void upgrade() throws Exception { final AspectSpec termAspectSpec = - _entityRegistry + entityRegistry .getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME) .getAspectSpec(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME); final AspectSpec nodeAspectSpec = - _entityRegistry + entityRegistry .getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME) .getAspectSpec(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME); final AuditStamp auditStamp = @@ -85,14 +88,16 @@ public ExecutionMode getExecutionMode() { private int getAndRestoreTermAspectIndices( int start, AuditStamp auditStamp, AspectSpec termAspectSpec) throws Exception { SearchResult termsResult = - _entitySearchService.search( + entitySearchService.search( + opContext.withSearchFlags( + flags -> + flags.setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)), List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), "", null, null, start, - BATCH_SIZE, - new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)); + BATCH_SIZE); List termUrns = termsResult.getEntities().stream() .map(SearchEntity::getEntity) @@ -153,14 +158,16 @@ private int getAndRestoreTermAspectIndices( private int getAndRestoreNodeAspectIndices( int start, AuditStamp auditStamp, AspectSpec nodeAspectSpec) throws Exception { SearchResult nodesResult = - _entitySearchService.search( + entitySearchService.search( + opContext.withSearchFlags( + flags -> + flags.setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)), List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), "", null, null, start, - BATCH_SIZE, - new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)); + BATCH_SIZE); List nodeUrns = nodesResult.getEntities().stream() .map(SearchEntity::getEntity) diff --git 
a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/CacheTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/CacheTest.java index 6cc1d293e24e6..d28bb291ad236 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/CacheTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/CacheTest.java @@ -2,6 +2,7 @@ import static com.datahub.util.RecordUtils.*; import static com.linkedin.metadata.search.client.CachingEntitySearchService.*; +import static org.mockito.Mockito.mock; import com.google.common.collect.ImmutableList; import com.hazelcast.config.Config; @@ -14,6 +15,7 @@ import com.linkedin.metadata.graph.LineageDirection; import com.linkedin.metadata.graph.LineageRelationship; import com.linkedin.metadata.graph.LineageRelationshipArray; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -29,6 +31,8 @@ import com.linkedin.metadata.search.SearchResultMetadata; import com.linkedin.metadata.search.cache.CacheableSearcher; import com.linkedin.metadata.search.cache.CachedEntityLineageResult; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.time.temporal.ChronoUnit; import java.util.List; import org.javatuples.Quintet; @@ -56,6 +60,8 @@ public CacheTest() { @Test public void hazelcastTest() { + OperationContext systemOpContext = + TestOperationContexts.systemContextNoSearchAuthorization(mock(EntityRegistry.class)); CorpuserUrn corpuserUrn = new CorpuserUrn("user"); SearchEntity searchEntity = new SearchEntity().setEntity(corpuserUrn); SearchResult searchResult = @@ -83,7 +89,6 @@ public void hazelcastTest() { 10, querySize -> searchResult, querySize -> quintet, - 
null, true); CacheableSearcher< @@ -94,18 +99,18 @@ public void hazelcastTest() { 10, querySize -> searchResult, querySize -> quintet, - null, true); // Cache result - SearchResult result = cacheableSearcher1.getSearchResults(0, 1); + SearchResult result = cacheableSearcher1.getSearchResults(systemOpContext, 0, 1); Assert.assertNotEquals(result, null); Assert.assertEquals( instance1.getMap("test").get(quintet), instance2.getMap("test").get(quintet)); - Assert.assertEquals(cacheableSearcher1.getSearchResults(0, 1), searchResult); + Assert.assertEquals(cacheableSearcher1.getSearchResults(systemOpContext, 0, 1), searchResult); Assert.assertEquals( - cacheableSearcher1.getSearchResults(0, 1), cacheableSearcher2.getSearchResults(0, 1)); + cacheableSearcher1.getSearchResults(systemOpContext, 0, 1), + cacheableSearcher2.getSearchResults(systemOpContext, 0, 1)); } @Test @@ -181,7 +186,7 @@ public void testLineageCaching() { EntityLineageResultCacheKey key = new EntityLineageResultCacheKey( - corpuserUrn, LineageDirection.DOWNSTREAM, 0L, 1L, 1, ChronoUnit.DAYS); + "", corpuserUrn, LineageDirection.DOWNSTREAM, 0L, 1L, 1, ChronoUnit.DAYS); cache1.put(key, cachedEntityLineageResult); diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java index 0858736e39021..86707ac0c6db5 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java @@ -2,6 +2,7 @@ import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -20,6 +21,7 @@ import 
com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchService; import com.linkedin.mxe.MetadataChangeProposal; +import io.datahubproject.metadata.context.OperationContext; import java.net.URISyntaxException; import java.util.Collections; import java.util.HashMap; @@ -86,19 +88,19 @@ public void testExecuteNoExistingBrowsePaths() throws Exception { .thenReturn(null); BackfillBrowsePathsV2Step backfillBrowsePathsV2Step = - new BackfillBrowsePathsV2Step(mockService, mockSearchService); + new BackfillBrowsePathsV2Step(mock(OperationContext.class), mockService, mockSearchService); backfillBrowsePathsV2Step.execute(); Mockito.verify(mockSearchService, Mockito.times(9)) .scrollAcrossEntities( + any(OperationContext.class), any(), Mockito.eq("*"), any(Filter.class), Mockito.eq(null), Mockito.eq(null), Mockito.eq("5m"), - Mockito.eq(5000), - Mockito.eq(null)); + Mockito.eq(5000)); // Verify that 11 aspects are ingested, 2 for the upgrade request / result, 9 for ingesting 1 of // each entity type Mockito.verify(mockService, Mockito.times(11)) @@ -107,7 +109,7 @@ public void testExecuteNoExistingBrowsePaths() throws Exception { @Test public void testDoesNotRunWhenAlreadyExecuted() throws Exception { - final EntityService mockService = Mockito.mock(EntityService.class); + final EntityService mockService = mock(EntityService.class); final SearchService mockSearchService = initMockSearchService(); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); @@ -127,7 +129,7 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception { .thenReturn(response); BackfillBrowsePathsV2Step backfillBrowsePathsV2Step = - new BackfillBrowsePathsV2Step(mockService, mockSearchService); + new BackfillBrowsePathsV2Step(mock(OperationContext.class), mockService, mockSearchService); backfillBrowsePathsV2Step.execute(); Mockito.verify(mockService, Mockito.times(0)) @@ -136,7 +138,7 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception 
{ } private EntityService initMockService() throws URISyntaxException { - final EntityService mockService = Mockito.mock(EntityService.class); + final EntityService mockService = mock(EntityService.class); final EntityRegistry registry = new UpgradeDefaultBrowsePathsStepTest.TestEntityRegistry(); Mockito.when(mockService.getEntityRegistry()).thenReturn(registry); @@ -153,19 +155,19 @@ private EntityService initMockService() throws URISyntaxException { } private SearchService initMockSearchService() { - final SearchService mockSearchService = Mockito.mock(SearchService.class); + final SearchService mockSearchService = mock(SearchService.class); for (int i = 0; i < ENTITY_TYPES.size(); i++) { Mockito.when( mockSearchService.scrollAcrossEntities( + Mockito.any(OperationContext.class), Mockito.eq(ImmutableList.of(ENTITY_TYPES.get(i))), Mockito.eq("*"), any(Filter.class), Mockito.eq(null), Mockito.eq(null), Mockito.eq("5m"), - Mockito.eq(5000), - Mockito.eq(null))) + Mockito.eq(5000))) .thenReturn( new ScrollResult() .setNumEntities(1) diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java index 4a4532763f02b..426e52d3636f7 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java @@ -15,13 +15,13 @@ import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import 
com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.util.Pair; +import io.datahubproject.metadata.context.OperationContext; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -54,16 +54,13 @@ private void mockGetTermInfo( .setAspects(new EnvelopedAspectMap(termInfoAspects))); Mockito.when( mockSearchService.search( - List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), - "", - null, - null, - 0, - 1000, - new SearchFlags() - .setFulltext(false) - .setSkipAggregates(true) - .setSkipHighlighting(true))) + Mockito.any(), + Mockito.eq(List.of(Constants.GLOSSARY_TERM_ENTITY_NAME)), + Mockito.eq(""), + Mockito.any(), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(1000))) .thenReturn( new SearchResult() .setNumEntities(1) @@ -93,16 +90,13 @@ private void mockGetNodeInfo( .setAspects(new EnvelopedAspectMap(nodeInfoAspects))); Mockito.when( mockSearchService.search( - List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), - "", - null, - null, - 0, - 1000, - new SearchFlags() - .setFulltext(false) - .setSkipAggregates(true) - .setSkipHighlighting(true))) + Mockito.any(), + Mockito.eq(List.of(Constants.GLOSSARY_NODE_ENTITY_NAME)), + Mockito.eq(""), + Mockito.any(), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(1000))) .thenReturn( new SearchResult() .setNumEntities(1) @@ -171,7 +165,8 @@ public void testExecuteFirstTime() throws Exception { AspectSpec aspectSpec = mockGlossaryAspectSpecs(mockRegistry); RestoreGlossaryIndices restoreIndicesStep = - new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); + new RestoreGlossaryIndices( + Mockito.mock(OperationContext.class), mockService, mockSearchService, mockRegistry); restoreIndicesStep.execute(); Mockito.verify(mockRegistry, Mockito.times(1)) @@ -254,7 +249,8 @@ public void testExecutesWithNewVersion() throws Exception { AspectSpec aspectSpec = mockGlossaryAspectSpecs(mockRegistry); RestoreGlossaryIndices restoreIndicesStep = - new RestoreGlossaryIndices(mockService, 
mockSearchService, mockRegistry); + new RestoreGlossaryIndices( + Mockito.mock(OperationContext.class), mockService, mockSearchService, mockRegistry); restoreIndicesStep.execute(); Mockito.verify(mockRegistry, Mockito.times(1)) @@ -319,7 +315,8 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception { .thenReturn(response); RestoreGlossaryIndices restoreIndicesStep = - new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); + new RestoreGlossaryIndices( + Mockito.mock(OperationContext.class), mockService, mockSearchService, mockRegistry); restoreIndicesStep.execute(); Mockito.verify(mockRegistry, Mockito.times(0)) @@ -328,22 +325,22 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception { .getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); Mockito.verify(mockSearchService, Mockito.times(0)) .search( - List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), - "", - null, - null, - 0, - 1000, - new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)); + Mockito.any(), + Mockito.eq(List.of(Constants.GLOSSARY_TERM_ENTITY_NAME)), + Mockito.eq(""), + Mockito.any(), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(1000)); Mockito.verify(mockSearchService, Mockito.times(0)) .search( - List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), - "", - null, - null, - 0, - 1000, - new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)); + Mockito.any(), + Mockito.eq(List.of(Constants.GLOSSARY_NODE_ENTITY_NAME)), + Mockito.eq(""), + Mockito.any(), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(1000)); Mockito.verify(mockService, Mockito.times(0)) .ingestProposal( Mockito.any(MetadataChangeProposal.class), diff --git a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java index da4fac1451e46..1c4f2824c6357 100644 --- 
a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java +++ b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java @@ -11,10 +11,12 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.inject.name.Named; import com.linkedin.datahub.graphql.GraphQLEngine; import com.linkedin.datahub.graphql.exception.DataHubGraphQLError; import com.linkedin.metadata.utils.metrics.MetricUtils; import graphql.ExecutionResult; +import io.datahubproject.metadata.context.OperationContext; import io.opentelemetry.api.trace.Span; import jakarta.inject.Inject; import jakarta.servlet.http.HttpServletRequest; @@ -25,6 +27,7 @@ import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; import org.springframework.http.HttpEntity; import org.springframework.http.HttpStatus; @@ -47,6 +50,11 @@ public GraphQLController() { @Inject AuthorizerChain _authorizerChain; + @Nonnull + @Inject + @Named("systemOperationContext") + private OperationContext systemOperationContext; + @PostMapping(value = "/graphql", produces = "application/json;charset=utf-8") CompletableFuture> postGraphQL(HttpEntity httpEntity) { @@ -95,7 +103,8 @@ CompletableFuture> postGraphQL(HttpEntity httpEnt * Init QueryContext */ Authentication authentication = AuthenticationContext.getAuthentication(); - SpringQueryContext context = new SpringQueryContext(true, authentication, _authorizerChain); + SpringQueryContext context = + new SpringQueryContext(true, authentication, _authorizerChain, systemOperationContext); Span.current().setAttribute("actor.urn", context.getActorUrn()); return CompletableFuture.supplyAsync( diff --git 
a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java index 379521eda0c1a..20e06945e1d6b 100644 --- a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java +++ b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java @@ -3,34 +3,27 @@ import com.datahub.authentication.Authentication; import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.datahub.graphql.QueryContext; +import io.datahubproject.metadata.context.OperationContext; +import javax.annotation.Nonnull; +import lombok.Getter; +@Getter public class SpringQueryContext implements QueryContext { private final boolean isAuthenticated; private final Authentication authentication; private final Authorizer authorizer; + @Nonnull private final OperationContext operationContext; public SpringQueryContext( final boolean isAuthenticated, final Authentication authentication, - final Authorizer authorizer) { + final Authorizer authorizer, + @Nonnull final OperationContext systemOperationContext) { this.isAuthenticated = isAuthenticated; this.authentication = authentication; this.authorizer = authorizer; - } - - @Override - public boolean isAuthenticated() { - return this.isAuthenticated; - } - - @Override - public Authentication getAuthentication() { - return this.authentication; - } - - @Override - public Authorizer getAuthorizer() { - return this.authorizer; + this.operationContext = + OperationContext.asSession(systemOperationContext, authorizer, authentication, true); } } diff --git a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImpl.java b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImpl.java index 1e375f90fc38a..20fdb0db0bd09 100644 --- 
a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImpl.java +++ b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImpl.java @@ -19,6 +19,7 @@ import com.linkedin.metadata.search.ScrollResult; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchService; +import io.datahubproject.metadata.context.OperationContext; import io.datahubproject.openapi.dto.UpsertAspectRequest; import io.datahubproject.openapi.dto.UrnResponseMap; import io.datahubproject.openapi.entities.EntitiesController; @@ -71,6 +72,7 @@ import org.springframework.http.ResponseEntity; public class EntityApiDelegateImpl { + private final OperationContext systemOperationContext; private final EntityRegistry _entityRegistry; private final EntityService _entityService; private final SearchService _searchService; @@ -85,6 +87,7 @@ public class EntityApiDelegateImpl { private final StackWalker walker = StackWalker.getInstance(); public EntityApiDelegateImpl( + OperationContext systemOperationContext, EntityService entityService, SearchService searchService, EntitiesController entitiesController, @@ -93,6 +96,7 @@ public EntityApiDelegateImpl( Class reqClazz, Class respClazz, Class scrollRespClazz) { + this.systemOperationContext = systemOperationContext; this._entityService = entityService; this._searchService = searchService; this._entityRegistry = entityService.getEntityRegistry(); @@ -458,7 +462,13 @@ public ResponseEntity scroll( @Valid SortOrder sortOrder, @Valid String query) { + SearchFlags searchFlags = + new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true); + Authentication authentication = AuthenticationContext.getAuthentication(); + OperationContext opContext = + OperationContext.asSession( + systemOperationContext, _authorizationChain, authentication, true); com.linkedin.metadata.models.EntitySpec 
entitySpec = OpenApiEntitiesUtil.responseClassToEntitySpec(_entityRegistry, _respClazz); checkScrollAuthorized(authentication, entitySpec); @@ -470,19 +480,16 @@ public ResponseEntity scroll( com.linkedin.metadata.query.filter.SortOrder.valueOf( Optional.ofNullable(sortOrder).map(Enum::name).orElse("ASCENDING"))); - SearchFlags searchFlags = - new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true); - ScrollResult result = _searchService.scrollAcrossEntities( + opContext.withSearchFlags(flags -> searchFlags), List.of(entitySpec.getName()), query, null, sortCriterion, scrollId, null, - count, - searchFlags); + count); String[] urns = result.getEntities().stream() diff --git a/metadata-service/openapi-entity-servlet/src/main/resources/JavaSpring/apiController.mustache b/metadata-service/openapi-entity-servlet/src/main/resources/JavaSpring/apiController.mustache index 7ac087f220561..f9717b8cb16fb 100644 --- a/metadata-service/openapi-entity-servlet/src/main/resources/JavaSpring/apiController.mustache +++ b/metadata-service/openapi-entity-servlet/src/main/resources/JavaSpring/apiController.mustache @@ -1,6 +1,7 @@ package {{package}}; import io.datahubproject.openapi.v2.delegates.EntityApiDelegateImpl; +import io.datahubproject.metadata.context.OperationContext; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.search.SearchService; import io.datahubproject.openapi.entities.EntitiesController; @@ -91,12 +92,14 @@ public class {{classname}}Controller implements {{classname}} { private final EntityApiDelegateImpl<{{requestClass}}, {{responseClass}}, {{scrollResponseClass}}> delegate; @org.springframework.beans.factory.annotation.Autowired - public {{classname}}Controller(ObjectMapper objectMapper, HttpServletRequest request, EntityService entityService, + public {{classname}}Controller(ObjectMapper objectMapper, HttpServletRequest request, + 
@org.springframework.beans.factory.annotation.Qualifier("systemOperationContext") OperationContext systemOperationContext, + EntityService entityService, SearchService searchService, EntitiesController v1Controller, AuthorizerChain authorizationChain, @Value("${authorization.restApiAuthorization:false}") boolean restApiAuthorizationEnabled) { this.objectMapper = objectMapper; this.request = request; - this.delegate = new EntityApiDelegateImpl<{{requestClass}}, {{responseClass}}, {{scrollResponseClass}}>(entityService, searchService, v1Controller, + this.delegate = new EntityApiDelegateImpl<{{requestClass}}, {{responseClass}}, {{scrollResponseClass}}>(systemOperationContext, entityService, searchService, v1Controller, restApiAuthorizationEnabled, authorizationChain, {{requestClass}}.class, {{responseClass}}.class, {{scrollResponseClass}}.class); } {{#isJava8or11}} diff --git a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIEntityTestConfiguration.java b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIEntityTestConfiguration.java index 920a13d998985..4ff40b4beed5a 100644 --- a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIEntityTestConfiguration.java +++ b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIEntityTestConfiguration.java @@ -27,11 +27,13 @@ import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeline.TimelineService; +import io.datahubproject.metadata.context.OperationContext; import io.datahubproject.openapi.dto.UrnResponseMap; import io.datahubproject.openapi.entities.EntitiesController; import io.datahubproject.openapi.generated.EntityResponse; import io.datahubproject.openapi.relationships.RelationshipsController; import io.datahubproject.openapi.timeline.TimelineController; 
+import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.Arrays; import java.util.Map; import java.util.stream.Collectors; @@ -62,7 +64,7 @@ public EntityService entityService(final EntityRegistry mockRegistry) { public SearchService searchService() { SearchService searchService = mock(SearchService.class); when(searchService.scrollAcrossEntities( - anyList(), any(), any(), any(), any(), any(), anyInt(), any())) + any(OperationContext.class), anyList(), any(), any(), any(), any(), any(), anyInt())) .thenReturn(new ScrollResult().setEntities(new SearchEntityArray())); return searchService; @@ -134,4 +136,9 @@ public EntitiesController entitiesController() { @MockBean public TimelineController timelineController; @MockBean public RelationshipsController relationshipsController; + + @Bean(name = "systemOperationContext") + public OperationContext operationContext(final EntityRegistry entityRegistry) { + return TestOperationContexts.systemContextNoSearchAuthorization(entityRegistry); + } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java index 1d6d3067d23f7..f3617801e6b55 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java @@ -3,9 +3,9 @@ import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.AuthUtil; -import com.datahub.authorization.AuthorizerChain; import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.datahub.plugins.auth.authorization.Authorizer; import 
com.google.common.collect.ImmutableList; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.SearchFlags; @@ -15,6 +15,7 @@ import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.timeseries.TimeseriesIndexSizeResult; +import io.datahubproject.metadata.context.OperationContext; import io.datahubproject.openapi.util.ElasticsearchUtils; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Parameter; @@ -46,7 +47,8 @@ name = "ElasticSearchOperations", description = "An API for managing your elasticsearch instance") public class OperationsController { - private final AuthorizerChain authorizerChain; + private final Authorizer authorizerChain; + private final OperationContext systemOperationContext; @Value("${authorization.restApiAuthorization:false}") private boolean restApiAuthorizationEnabled; @@ -57,11 +59,12 @@ public class OperationsController { private final EntitySearchService searchService; public OperationsController( - AuthorizerChain authorizerChain, + OperationContext systemOperationContext, SystemMetadataService systemMetadataService, TimeseriesAspectService timeseriesAspectService, EntitySearchService searchService) { - this.authorizerChain = authorizerChain; + this.systemOperationContext = systemOperationContext; + this.authorizerChain = systemOperationContext.getAuthorizerContext().getAuthorizer(); this.systemMetadataService = systemMetadataService; this.timeseriesAspectService = timeseriesAspectService; this.searchService = searchService; @@ -231,14 +234,19 @@ public ResponseEntity explainSearchQuery( log.error("{} is not authorized to get timeseries index sizes", actorUrnStr); return ResponseEntity.status(HttpStatus.FORBIDDEN).body(null); } + OperationContext opContext = + systemOperationContext + .asSession(authorizerChain, authentication) + .withSearchFlags(flags -> searchFlags); + 
ExplainResponse response = searchService.explain( + opContext, query, documentId, entityName, filters, sortCriterion, - searchFlags, scrollId, keepAlive, size, diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java index 656d6542483cf..7a11d60a567f9 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java @@ -37,6 +37,7 @@ import com.linkedin.metadata.utils.SearchUtil; import com.linkedin.mxe.SystemMetadata; import com.linkedin.util.Pair; +import io.datahubproject.metadata.context.OperationContext; import io.datahubproject.openapi.v2.models.GenericEntity; import io.datahubproject.openapi.v2.models.GenericScrollResult; import io.swagger.v3.oas.annotations.Operation; @@ -55,6 +56,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.DeleteMapping; @@ -82,6 +84,10 @@ public class EntityController { @Autowired private boolean restApiAuthorizationEnabled; @Autowired private ObjectMapper objectMapper; + @Qualifier("systemOperationContext") + @Autowired + private OperationContext systemOperationContext; + @Tag(name = "Generic Entities", description = "API for interacting with generic entities.") @GetMapping(value = "/{entityName}", produces = MediaType.APPLICATION_JSON_VALUE) @Operation(summary = "Scroll entities") @@ -100,28 +106,31 @@ public ResponseEntity> getEntities( EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); 
+ Authentication authentication = AuthenticationContext.getAuthentication(); if (restApiAuthorizationEnabled) { - Authentication authentication = AuthenticationContext.getAuthentication(); checkAuthorized( authorizationChain, authentication.getActor(), entitySpec, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); } + OperationContext opContext = + OperationContext.asSession( + systemOperationContext, authorizationChain, authentication, true); // TODO: support additional and multiple sort params SortCriterion sortCriterion = SearchUtil.sortBy(sortField, SortOrder.valueOf(sortOrder)); ScrollResult result = searchService.scrollAcrossEntities( + opContext.withSearchFlags(flags -> DEFAULT_SEARCH_FLAGS), List.of(entitySpec.getName()), query, null, sortCriterion, scrollId, null, - count, - DEFAULT_SEARCH_FLAGS); + count); return ResponseEntity.ok( GenericScrollResult.builder() diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json index eac1cc690a60d..c5fc14c45c522 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json @@ -74,6 +74,10 @@ }, { "name" : "limit", "type" : "int" + }, { + "name" : "searchFlags", + "type" : "com.linkedin.metadata.query.SearchFlags", + "optional" : true } ], "returns" : "com.linkedin.metadata.query.AutoCompleteResult" }, { @@ -114,6 +118,10 @@ }, { "name" : "limit", "type" : "int" + }, { + "name" : "searchFlags", + "type" : "com.linkedin.metadata.query.SearchFlags", + "optional" : true } ], "returns" : "com.linkedin.metadata.browse.BrowseResult" }, { diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json index fe16d24e3475a..4375dfa587e7d 
100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json @@ -1222,6 +1222,24 @@ "items" : "Owner" }, "doc" : "List of owners of the entity." + }, { + "name" : "ownerTypes", + "type" : { + "type" : "map", + "values" : { + "type" : "array", + "items" : "Urn" + } + }, + "doc" : "Owners to ownership type map, populated with mutation hook.", + "default" : { }, + "optional" : true, + "Searchable" : { + "/*" : { + "fieldType" : "OBJECT", + "queryByDefault" : false + } + } }, { "name" : "lastModified", "type" : "AuditStamp", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json index 55fed125936eb..f6ccf6a9c6326 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json @@ -1258,6 +1258,24 @@ "items" : "Owner" }, "doc" : "List of owners of the entity." 
+ }, { + "name" : "ownerTypes", + "type" : { + "type" : "map", + "values" : { + "type" : "array", + "items" : "Urn" + } + }, + "doc" : "Owners to ownership type map, populated with mutation hook.", + "default" : { }, + "optional" : true, + "Searchable" : { + "/*" : { + "fieldType" : "OBJECT", + "queryByDefault" : false + } + } }, { "name" : "lastModified", "type" : "AuditStamp", @@ -5785,6 +5803,18 @@ "type" : "GroupingSpec", "doc" : "Instructions for grouping results before returning", "optional" : true + }, { + "name" : "includeSoftDeleted", + "type" : "boolean", + "doc" : "include soft deleted entities in results", + "default" : false, + "optional" : true + }, { + "name" : "includeRestricted", + "type" : "boolean", + "doc" : "include restricted entities in results (default is to filter)", + "default" : false, + "optional" : true } ] }, { "type" : "enum", @@ -6111,6 +6141,14 @@ "name" : "score", "type" : "double", "optional" : true + }, { + "name" : "restrictedAspects", + "type" : { + "type" : "array", + "items" : "string" + }, + "doc" : "A list of the the restricted aspects on the entity.\nIf the key aspect is present, assume ALL aspects should be restricted including the entity's Urn.", + "optional" : true } ] } ], "fields" : [ { @@ -6420,6 +6458,10 @@ }, { "name" : "limit", "type" : "int" + }, { + "name" : "searchFlags", + "type" : "com.linkedin.metadata.query.SearchFlags", + "optional" : true } ], "returns" : "com.linkedin.metadata.query.AutoCompleteResult" }, { @@ -6460,6 +6502,10 @@ }, { "name" : "limit", "type" : "int" + }, { + "name" : "searchFlags", + "type" : "com.linkedin.metadata.query.SearchFlags", + "optional" : true } ], "returns" : "com.linkedin.metadata.browse.BrowseResult" }, { diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json index f9f1999923ec0..2168ee950957a 100644 --- 
a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json @@ -964,6 +964,24 @@ "items" : "Owner" }, "doc" : "List of owners of the entity." + }, { + "name" : "ownerTypes", + "type" : { + "type" : "map", + "values" : { + "type" : "array", + "items" : "Urn" + } + }, + "doc" : "Owners to ownership type map, populated with mutation hook.", + "default" : { }, + "optional" : true, + "Searchable" : { + "/*" : { + "fieldType" : "OBJECT", + "queryByDefault" : false + } + } }, { "name" : "lastModified", "type" : "AuditStamp", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json index 88dad7e49152a..2c093f753e3a6 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json @@ -964,6 +964,24 @@ "items" : "Owner" }, "doc" : "List of owners of the entity." 
+ }, { + "name" : "ownerTypes", + "type" : { + "type" : "map", + "values" : { + "type" : "array", + "items" : "Urn" + } + }, + "doc" : "Owners to ownership type map, populated with mutation hook.", + "default" : { }, + "optional" : true, + "Searchable" : { + "/*" : { + "fieldType" : "OBJECT", + "queryByDefault" : false + } + } }, { "name" : "lastModified", "type" : "AuditStamp", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json index 4d34126cd59fc..13172d3e4d5ec 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json @@ -1258,6 +1258,24 @@ "items" : "Owner" }, "doc" : "List of owners of the entity." + }, { + "name" : "ownerTypes", + "type" : { + "type" : "map", + "values" : { + "type" : "array", + "items" : "Urn" + } + }, + "doc" : "Owners to ownership type map, populated with mutation hook.", + "default" : { }, + "optional" : true, + "Searchable" : { + "/*" : { + "fieldType" : "OBJECT", + "queryByDefault" : false + } + } }, { "name" : "lastModified", "type" : "AuditStamp", diff --git a/metadata-service/restli-client/build.gradle b/metadata-service/restli-client/build.gradle index 86336755dc095..9bee54da9ff6e 100644 --- a/metadata-service/restli-client/build.gradle +++ b/metadata-service/restli-client/build.gradle @@ -9,6 +9,7 @@ dependencies { api project(path: ':metadata-service:restli-api', configuration: 'restClient') api project(':metadata-events:mxe-schemas') api project(':metadata-utils') + api project(':metadata-operation-context') implementation project(':metadata-service:configuration') implementation externalDependency.caffeine diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java 
b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java index 65169344776b7..07ecdf2408d9d 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java @@ -21,7 +21,6 @@ import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.ListResult; import com.linkedin.metadata.query.ListUrnsResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.LineageScrollResult; @@ -32,6 +31,7 @@ import com.linkedin.mxe.PlatformEvent; import com.linkedin.mxe.SystemMetadata; import com.linkedin.r2.RemoteInvocationException; +import io.datahubproject.metadata.context.OperationContext; import java.net.URISyntaxException; import java.util.Collection; import java.util.List; @@ -49,7 +49,7 @@ public interface EntityClient { default void postConstruct(AspectRetriever aspectRetriever) {} @Nullable - public EntityResponse getV2( + EntityResponse getV2( @Nonnull String entityName, @Nonnull final Urn urn, @Nullable final Set aspectNames, @@ -58,11 +58,11 @@ public EntityResponse getV2( @Nonnull @Deprecated - public Entity get(@Nonnull final Urn urn, @Nonnull final Authentication authentication) + Entity get(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException; @Nonnull - public Map batchGetV2( + Map batchGetV2( @Nonnull String entityName, @Nonnull final Set urns, @Nullable final Set aspectNames, @@ -79,7 +79,7 @@ Map batchGetVersionedV2( @Nonnull @Deprecated - public Map batchGet( + Map batchGet( @Nonnull final Set urns, @Nonnull final Authentication authentication) throws RemoteInvocationException; @@ -93,13 +93,13 @@ public Map batchGet( * @throws RemoteInvocationException */ @Nonnull - public 
AutoCompleteResult autoComplete( + AutoCompleteResult autoComplete( + @Nonnull OperationContext opContext, @Nonnull String entityType, @Nonnull String query, @Nullable Filter requestFilters, @Nonnull int limit, - @Nullable String field, - @Nonnull Authentication authentication) + @Nullable String field) throws RemoteInvocationException; /** @@ -111,12 +111,12 @@ public AutoCompleteResult autoComplete( * @throws RemoteInvocationException */ @Nonnull - public AutoCompleteResult autoComplete( + AutoCompleteResult autoComplete( + @Nonnull OperationContext opContext, @Nonnull String entityType, @Nonnull String query, @Nullable Filter requestFilters, - @Nonnull int limit, - @Nonnull Authentication authentication) + @Nonnull int limit) throws RemoteInvocationException; /** @@ -130,13 +130,13 @@ public AutoCompleteResult autoComplete( * @throws RemoteInvocationException */ @Nonnull - public BrowseResult browse( + BrowseResult browse( + @Nonnull OperationContext opContext, @Nonnull String entityType, @Nonnull String path, @Nullable Map requestFilters, int start, - int limit, - @Nonnull Authentication authentication) + int limit) throws RemoteInvocationException; /** @@ -151,15 +151,14 @@ public BrowseResult browse( * @throws RemoteInvocationException */ @Nonnull - public BrowseResultV2 browseV2( + BrowseResultV2 browseV2( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, - int count, - @Nonnull Authentication authentication, - @Nullable SearchFlags searchFlags) + int count) throws RemoteInvocationException; /** @@ -174,30 +173,29 @@ public BrowseResultV2 browseV2( * @throws RemoteInvocationException */ @Nonnull - public BrowseResultV2 browseV2( + BrowseResultV2 browseV2( + @Nonnull OperationContext opContext, @Nonnull List entityNames, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, - int count, - @Nonnull Authentication authentication, - 
@Nullable SearchFlags searchFlags) + int count) throws RemoteInvocationException; @Deprecated - public void update(@Nonnull final Entity entity, @Nonnull final Authentication authentication) + void update(@Nonnull final Entity entity, @Nonnull final Authentication authentication) throws RemoteInvocationException; @Deprecated - public void updateWithSystemMetadata( + void updateWithSystemMetadata( @Nonnull final Entity entity, @Nullable final SystemMetadata systemMetadata, @Nonnull final Authentication authentication) throws RemoteInvocationException; @Deprecated - public void batchUpdate( + void batchUpdate( @Nonnull final Set entities, @Nonnull final Authentication authentication) throws RemoteInvocationException; @@ -208,19 +206,17 @@ public void batchUpdate( * @param requestFilters search filters * @param start start offset for search results * @param count max number of search results requested - * @param searchFlags configuration flags for the search request * @return a set of search results * @throws RemoteInvocationException */ @Nonnull - public SearchResult search( + SearchResult search( + @Nonnull OperationContext opContext, @Nonnull String entity, @Nonnull String input, @Nullable Map requestFilters, int start, - int count, - @Nonnull Authentication authentication, - @Nullable SearchFlags searchFlags) + int count) throws RemoteInvocationException; /** @@ -235,12 +231,12 @@ public SearchResult search( * @throws RemoteInvocationException */ @Nonnull - public ListResult list( + ListResult list( + @Nonnull OperationContext opContext, @Nonnull String entity, @Nullable Map requestFilters, int start, - int count, - @Nonnull Authentication authentication) + int count) throws RemoteInvocationException; /** @@ -251,20 +247,18 @@ public ListResult list( * @param sortCriterion sort criterion * @param start start offset for search results * @param count max number of search results requested - * @param searchFlags configuration flags for the search request * @return 
Snapshot key * @throws RemoteInvocationException */ @Nonnull - public SearchResult search( + SearchResult search( + @Nonnull OperationContext opContext, @Nonnull String entity, @Nonnull String input, @Nullable Filter filter, SortCriterion sortCriterion, int start, - int count, - @Nonnull Authentication authentication, - @Nullable SearchFlags searchFlags) + int count) throws RemoteInvocationException; /** @@ -275,20 +269,18 @@ public SearchResult search( * @param filter search filters * @param start start offset for search results * @param count max number of search results requested - * @param searchFlags configuration flags for the search request * @return Snapshot key * @throws RemoteInvocationException */ @Nonnull - public SearchResult searchAcrossEntities( + SearchResult searchAcrossEntities( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String input, @Nullable Filter filter, int start, int count, - @Nullable SearchFlags searchFlags, - @Nullable SortCriterion sortCriterion, - @Nonnull Authentication authentication) + @Nullable SortCriterion sortCriterion) throws RemoteInvocationException; /** @@ -299,21 +291,19 @@ public SearchResult searchAcrossEntities( * @param filter search filters * @param start start offset for search results * @param count max number of search results requested - * @param searchFlags configuration flags for the search request * @param facets list of facets we want aggregations for * @return Snapshot key * @throws RemoteInvocationException */ @Nonnull - public SearchResult searchAcrossEntities( + SearchResult searchAcrossEntities( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String input, @Nullable Filter filter, int start, int count, - @Nullable SearchFlags searchFlags, @Nullable SortCriterion sortCriterion, - @Nonnull Authentication authentication, List facets) throws RemoteInvocationException; @@ -331,14 +321,13 @@ public SearchResult searchAcrossEntities( */ @Nonnull ScrollResult 
scrollAcrossEntities( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String input, @Nullable Filter filter, @Nullable String scrollId, @Nullable String keepAlive, - int count, - @Nullable SearchFlags searchFlags, - @Nonnull Authentication authentication) + int count) throws RemoteInvocationException; /** @@ -353,12 +342,12 @@ ScrollResult scrollAcrossEntities( * @param sortCriterion {@link SortCriterion} to be applied to search results * @param start index to start the search from * @param count the number of search hits to return - * @param searchFlags configuration flags for the search request * @return a {@link SearchResult} that contains a list of matched documents and related search * result metadata */ @Nonnull - public LineageSearchResult searchAcrossLineage( + LineageSearchResult searchAcrossLineage( + @Nonnull OperationContext opContext, @Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, @Nonnull List entities, @@ -367,9 +356,7 @@ public LineageSearchResult searchAcrossLineage( @Nullable Filter filter, @Nullable SortCriterion sortCriterion, int start, - int count, - @Nullable SearchFlags searchFlags, - @Nonnull final Authentication authentication) + int count) throws RemoteInvocationException; /** @@ -386,13 +373,12 @@ public LineageSearchResult searchAcrossLineage( * @param count the number of search hits to return * @param endTimeMillis end time to filter to * @param startTimeMillis start time to filter from - * @param searchFlags configuration flags for the search request - * @param authentication a reference to an authentication * @return a {@link SearchResult} that contains a list of matched documents and related search * result metadata */ @Nonnull - public LineageSearchResult searchAcrossLineage( + LineageSearchResult searchAcrossLineage( + @Nonnull OperationContext opContext, @Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, @Nonnull List entities, @@ -403,9 +389,7 @@ public LineageSearchResult 
searchAcrossLineage( int start, int count, @Nullable final Long startTimeMillis, - @Nullable final Long endTimeMillis, - @Nullable SearchFlags searchFlags, - @Nonnull final Authentication authentication) + @Nullable final Long endTimeMillis) throws RemoteInvocationException; /** @@ -428,6 +412,7 @@ public LineageSearchResult searchAcrossLineage( */ @Nonnull LineageScrollResult scrollAcrossLineage( + @Nonnull OperationContext opContext, @Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, @Nonnull List entities, @@ -439,9 +424,7 @@ LineageScrollResult scrollAcrossLineage( @Nonnull String keepAlive, int count, @Nullable final Long startTimeMillis, - @Nullable final Long endTimeMillis, - @Nullable SearchFlags searchFlags, - @Nonnull final Authentication authentication) + @Nullable final Long endTimeMillis) throws RemoteInvocationException; /** @@ -452,19 +435,19 @@ LineageScrollResult scrollAcrossLineage( * @throws RemoteInvocationException */ @Nonnull - public StringArray getBrowsePaths(@Nonnull Urn urn, @Nonnull Authentication authentication) + StringArray getBrowsePaths(@Nonnull Urn urn, @Nonnull Authentication authentication) throws RemoteInvocationException; - public void setWritable(boolean canWrite, @Nonnull Authentication authentication) + void setWritable(boolean canWrite, @Nonnull Authentication authentication) throws RemoteInvocationException; @Nonnull - public Map batchGetTotalEntityCount( - @Nonnull List entityName, @Nonnull Authentication authentication) + Map batchGetTotalEntityCount( + @Nonnull OperationContext opContext, @Nonnull List entityName) throws RemoteInvocationException; /** List all urns existing for a particular Entity type. */ - public ListUrnsResult listUrns( + ListUrnsResult listUrns( @Nonnull final String entityName, final int start, final int count, @@ -472,12 +455,11 @@ public ListUrnsResult listUrns( throws RemoteInvocationException; /** Hard delete an entity with a particular urn. 
*/ - public void deleteEntity(@Nonnull final Urn urn, @Nonnull final Authentication authentication) + void deleteEntity(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException; /** Delete all references to an entity with a particular urn. */ - public void deleteEntityReferences( - @Nonnull final Urn urn, @Nonnull final Authentication authentication) + void deleteEntityReferences(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException; /** @@ -492,13 +474,13 @@ public void deleteEntityReferences( * @throws RemoteInvocationException */ @Nonnull - public SearchResult filter( + SearchResult filter( + @Nonnull OperationContext opContext, @Nonnull String entity, @Nonnull Filter filter, @Nullable SortCriterion sortCriterion, int start, - int count, - @Nonnull Authentication authentication) + int count) throws RemoteInvocationException; /** @@ -510,12 +492,12 @@ public SearchResult filter( * @throws RemoteInvocationException */ @Nonnull - public boolean exists(@Nonnull Urn urn, @Nonnull Authentication authentication) + boolean exists(@Nonnull Urn urn, @Nonnull Authentication authentication) throws RemoteInvocationException; @Nullable @Deprecated - public VersionedAspect getAspect( + VersionedAspect getAspect( @Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, @@ -524,7 +506,7 @@ public VersionedAspect getAspect( @Nullable @Deprecated - public VersionedAspect getAspectOrNull( + VersionedAspect getAspectOrNull( @Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, @@ -545,7 +527,7 @@ default List getTimeseriesAspectValues( urn, entity, aspect, startTimeMillis, endTimeMillis, limit, filter, null, authentication); } - public List getTimeseriesAspectValues( + List getTimeseriesAspectValues( @Nonnull String urn, @Nonnull String entity, @Nonnull String aspect, @@ -609,7 +591,7 @@ default List batchIngestProposals( @Nonnull @Deprecated - public Optional 
getVersionedAspect( + Optional getVersionedAspect( @Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, @@ -618,21 +600,21 @@ public Optional getVersionedAspect( throws RemoteInvocationException; @Deprecated - public DataMap getRawAspect( + DataMap getRawAspect( @Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, @Nonnull Authentication authentication) throws RemoteInvocationException; - public void producePlatformEvent( + void producePlatformEvent( @Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event, @Nonnull Authentication authentication) throws Exception; - public void rollbackIngestion( + void rollbackIngestion( @Nonnull String runId, @Nonnull Authorizer authorizer, @Nonnull Authentication authentication) throws Exception; diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java index b5c6beaf4aa5b..2fcb02946ca46 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java @@ -11,6 +11,7 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; import com.linkedin.util.Pair; +import io.datahubproject.metadata.context.OperationContext; import java.util.Collection; import java.util.Map; import java.util.Optional; @@ -28,14 +29,19 @@ public class EntityClientCache { @NonNull private EntityClientCacheConfig config; @NonNull private final ClientCache cache; - @NonNull private BiFunction, Set, Map> loadFunction; + @NonNull private Function> loadFunction; - public EntityResponse getV2(@Nonnull final Urn urn, @Nonnull final Set aspectNames) { - return batchGetV2(Set.of(urn), aspectNames).get(urn); + public EntityResponse getV2( + @Nonnull OperationContext 
opContext, + @Nonnull final Urn urn, + @Nonnull final Set aspectNames) { + return batchGetV2(opContext, Set.of(urn), aspectNames).get(urn); } public Map batchGetV2( - @Nonnull final Set urns, @Nonnull final Set aspectNames) { + @Nonnull OperationContext opContext, + @Nonnull final Set urns, + @Nonnull final Set aspectNames) { final Map response; if (config.isEnabled()) { @@ -43,7 +49,14 @@ public Map batchGetV2( urns.stream() .flatMap( urn -> - aspectNames.stream().map(a -> Key.builder().urn(urn).aspectName(a).build())) + aspectNames.stream() + .map( + a -> + Key.builder() + .contextId(opContext.getEntityContextId()) + .urn(urn) + .aspectName(a) + .build())) .collect(Collectors.toSet()); Map envelopedAspects = cache.getAll(keys); @@ -61,7 +74,13 @@ public Map batchGetV2( response = responses.stream().collect(Collectors.toMap(EntityResponse::getUrn, Function.identity())); } else { - response = loadFunction.apply(urns, aspectNames); + response = + loadFunction.apply( + CollectionKey.builder() + .contextId(opContext.getEntityContextId()) + .urns(urns) + .aspectNames(aspectNames) + .build()); } return response; @@ -93,39 +112,16 @@ public EntityClientCache build(Class metricClazz) { // batch loads data from entity client (restli or java) Function, Map> loader = (Iterable keys) -> { - Map> keysByEntity = - StreamSupport.stream(keys.spliterator(), false) - .collect(Collectors.groupingBy(Key::getEntityName, Collectors.toSet())); - - Map results = - keysByEntity.entrySet().stream() - .flatMap( - entry -> { - Set urns = - entry.getValue().stream() - .map(Key::getUrn) - .collect(Collectors.toSet()); - Set aspects = - entry.getValue().stream() - .map(Key::getAspectName) - .collect(Collectors.toSet()); - return loadFunction.apply(urns, aspects).entrySet().stream(); - }) - .flatMap( - resp -> - resp.getValue().getAspects().values().stream() - .map( - envAspect -> { - Key key = - Key.builder() - .urn(resp.getKey()) - .aspectName(envAspect.getName()) - .build(); - return 
Map.entry(key, envAspect); - })) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - - return results; + Map>> keysByContextEntity = groupByContextEntity(keys); + + // load responses by context and combine + return keysByContextEntity.entrySet().stream() + .flatMap( + entry -> + loadByEntity(entry.getKey(), entry.getValue(), loadFunction) + .entrySet() + .stream()) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); }; // ideally the cache time comes from caching headers from service, but configuration driven @@ -149,9 +145,64 @@ public EntityClientCache build(Class metricClazz) { } } + private static Map>> groupByContextEntity( + Iterable keys) { + // group by context + Map> byContext = + StreamSupport.stream(keys.spliterator(), false) + .collect(Collectors.groupingBy(Key::getContextId, Collectors.toSet())); + + // then by entity + return byContext.entrySet().stream() + .map( + contextSet -> + Pair.of( + contextSet.getKey(), + contextSet.getValue().stream() + .collect(Collectors.groupingBy(Key::getEntityName, Collectors.toSet())))) + .collect(Collectors.toMap(Pair::getKey, Pair::getValue)); + } + + private static Map loadByEntity( + String contextId, + Map> keysByEntity, + Function> loadFunction) { + return keysByEntity.entrySet().stream() + .flatMap( + entry -> { + Set urns = + entry.getValue().stream().map(Key::getUrn).collect(Collectors.toSet()); + Set aspects = + entry.getValue().stream().map(Key::getAspectName).collect(Collectors.toSet()); + return loadFunction + .apply( + CollectionKey.builder() + .contextId(contextId) + .urns(urns) + .aspectNames(aspects) + .build()) + .entrySet() + .stream(); + }) + .flatMap( + resp -> + resp.getValue().getAspects().values().stream() + .map( + envAspect -> { + Key key = + Key.builder() + .urn(resp.getKey()) + .aspectName(envAspect.getName()) + .build(); + return Map.entry(key, envAspect); + })) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + @Data @Builder 
protected static class Key { + private final String contextId; private final Urn urn; private final String aspectName; @@ -159,4 +210,12 @@ public String getEntityName() { return urn.getEntityType(); } } + + @Data + @Builder + public static class CollectionKey { + private final String contextId; + private final Set urns; + private final Set aspectNames; + } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java index 653ef046ffc02..4dfe36b49bf11 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java @@ -75,6 +75,7 @@ import com.linkedin.restli.client.Client; import com.linkedin.restli.client.RestLiResponseException; import com.linkedin.restli.common.HttpStatus; +import io.datahubproject.metadata.context.OperationContext; import java.net.URISyntaxException; import java.util.Collection; import java.util.HashMap; @@ -114,6 +115,7 @@ public RestliEntityClient( super(restliClient, backoffPolicy, retryCount); } + @Override @Nullable public EntityResponse getV2( @Nonnull String entityName, @@ -126,6 +128,7 @@ public EntityResponse getV2( return sendClientRequest(requestBuilder, authentication).getEntity(); } + @Override @Nonnull public Entity get(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException { @@ -143,6 +146,7 @@ public Entity get(@Nonnull final Urn urn, @Nonnull final Authentication authenti * @param authentication the authentication to include in the request to the Metadata Service * @throws RemoteInvocationException */ + @Override @Nonnull public Map batchGet( @Nonnull final Set urns, @Nonnull final Authentication authentication) @@ -195,6 +199,7 @@ public Map batchGet( * @param authentication the 
authentication to include in the request to the Metadata Service * @throws RemoteInvocationException */ + @Override @Nonnull public Map batchGetV2( @Nonnull String entityName, @@ -237,6 +242,7 @@ public Map batchGetV2( * @param authentication the authentication to include in the request to the Metadata Service * @throws RemoteInvocationException */ + @Override @Nonnull public Map batchGetVersionedV2( @Nonnull String entityName, @@ -280,14 +286,15 @@ public Map batchGetVersionedV2( * @param field the field to autocomplete against, e.g. 'name' * @throws RemoteInvocationException */ + @Override @Nonnull public AutoCompleteResult autoComplete( + @Nonnull OperationContext opContext, @Nonnull String entityType, @Nonnull String query, @Nullable Filter requestFilters, @Nonnull int limit, - @Nullable String field, - @Nonnull final Authentication authentication) + @Nullable String field) throws RemoteInvocationException { EntitiesDoAutocompleteRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS @@ -297,7 +304,7 @@ public AutoCompleteResult autoComplete( .fieldParam(field) .filterParam(filterOrDefaultEmptyFilter(requestFilters)) .limitParam(limit); - return sendClientRequest(requestBuilder, authentication).getEntity(); + return sendClientRequest(requestBuilder, opContext.getAuthentication()).getEntity(); } /** @@ -309,13 +316,14 @@ public AutoCompleteResult autoComplete( * @param limit max number of autocomplete results * @throws RemoteInvocationException */ + @Override @Nonnull public AutoCompleteResult autoComplete( + @Nonnull OperationContext opContext, @Nonnull String entityType, @Nonnull String query, @Nullable Filter requestFilters, - @Nonnull int limit, - @Nonnull final Authentication authentication) + @Nonnull int limit) throws RemoteInvocationException { EntitiesDoAutocompleteRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS @@ -324,7 +332,7 @@ public AutoCompleteResult autoComplete( .queryParam(query) 
.filterParam(filterOrDefaultEmptyFilter(requestFilters)) .limitParam(limit); - return sendClientRequest(requestBuilder, authentication).getEntity(); + return sendClientRequest(requestBuilder, opContext.getAuthentication()).getEntity(); } /** @@ -337,14 +345,15 @@ public AutoCompleteResult autoComplete( * @param limit max number of datasets * @throws RemoteInvocationException */ + @Override @Nonnull public BrowseResult browse( + @Nonnull OperationContext opContext, @Nonnull String entityType, @Nonnull String path, @Nullable Map requestFilters, int start, - int limit, - @Nonnull final Authentication authentication) + int limit) throws RemoteInvocationException { EntitiesDoBrowseRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS @@ -356,7 +365,7 @@ public BrowseResult browse( if (requestFilters != null) { requestBuilder.filterParam(newFilter(requestFilters)); } - return sendClientRequest(requestBuilder, authentication).getEntity(); + return sendClientRequest(requestBuilder, opContext.getAuthentication()).getEntity(); } /** @@ -370,34 +379,34 @@ public BrowseResult browse( * @param count max number of results requested * @throws RemoteInvocationException */ + @Override @Nonnull public BrowseResultV2 browseV2( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, - int count, - @Nonnull Authentication authentication, - @Nullable SearchFlags searchFlags) { + int count) { throw new NotImplementedException("BrowseV2 is not implemented in Restli yet"); } @Nonnull @Override public BrowseResultV2 browseV2( + @Nonnull OperationContext opContext, @Nonnull List entityNames, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, - int count, - @Nonnull Authentication authentication, - @Nullable SearchFlags searchFlags) + int count) throws RemoteInvocationException { throw new NotImplementedException("BrowseV2 is not implemented in Restli yet"); } + 
@Override public void update(@Nonnull final Entity entity, @Nonnull final Authentication authentication) throws RemoteInvocationException { EntitiesDoIngestRequestBuilder requestBuilder = @@ -405,6 +414,7 @@ public void update(@Nonnull final Entity entity, @Nonnull final Authentication a sendClientRequest(requestBuilder, authentication); } + @Override public void updateWithSystemMetadata( @Nonnull final Entity entity, @Nullable final SystemMetadata systemMetadata, @@ -424,6 +434,7 @@ public void updateWithSystemMetadata( sendClientRequest(requestBuilder, authentication); } + @Override public void batchUpdate( @Nonnull final Set entities, @Nonnull final Authentication authentication) throws RemoteInvocationException { @@ -440,22 +451,22 @@ public void batchUpdate( * @param requestFilters search filters * @param start start offset for search results * @param count max number of search results requested - * @param searchFlags configuration flags for the search request * @return a set of search results * @throws RemoteInvocationException */ @Nonnull @Override public SearchResult search( + @Nonnull OperationContext opContext, @Nonnull String entity, @Nonnull String input, @Nullable Map requestFilters, int start, - int count, - @Nonnull final Authentication authentication, - @Nullable SearchFlags searchFlags) + int count) throws RemoteInvocationException { + SearchFlags searchFlags = opContext.getSearchContext().getSearchFlags(); + final EntitiesDoSearchRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS .actionSearch() @@ -465,11 +476,9 @@ public SearchResult search( .startParam(start) .fulltextParam(searchFlags != null ? 
searchFlags.isFulltext() : null) .countParam(count); - if (searchFlags != null) { - requestBuilder.searchFlagsParam(searchFlags); - } + requestBuilder.searchFlagsParam(opContext.getSearchContext().getSearchFlags()); - return sendClientRequest(requestBuilder, authentication).getEntity(); + return sendClientRequest(requestBuilder, opContext.getAuthentication()).getEntity(); } /** @@ -481,13 +490,14 @@ public SearchResult search( * @return a set of list results * @throws RemoteInvocationException */ + @Override @Nonnull public ListResult list( + @Nonnull OperationContext opContext, @Nonnull String entity, @Nullable Map requestFilters, int start, - int count, - @Nonnull final Authentication authentication) + int count) throws RemoteInvocationException { final EntitiesDoListRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS @@ -497,7 +507,7 @@ public ListResult list( .startParam(start) .countParam(count); - return sendClientRequest(requestBuilder, authentication).getEntity(); + return sendClientRequest(requestBuilder, opContext.getAuthentication()).getEntity(); } /** @@ -514,16 +524,16 @@ public ListResult list( @Nonnull @Override public SearchResult search( + @Nonnull OperationContext opContext, @Nonnull String entity, @Nonnull String input, @Nullable Filter filter, SortCriterion sortCriterion, int start, - int count, - @Nonnull final Authentication authentication, - @Nullable SearchFlags searchFlags) + int count) throws RemoteInvocationException { + SearchFlags searchFlags = opContext.getSearchContext().getSearchFlags(); final EntitiesDoSearchRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS .actionSearch() @@ -547,22 +557,22 @@ public SearchResult search( } } - return sendClientRequest(requestBuilder, authentication).getEntity(); + return sendClientRequest(requestBuilder, opContext.getAuthentication()).getEntity(); } + @Override @Nonnull public SearchResult searchAcrossEntities( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull 
String input, @Nullable Filter filter, int start, int count, - @Nullable SearchFlags searchFlags, - @Nullable SortCriterion sortCriterion, - @Nonnull final Authentication authentication) + @Nullable SortCriterion sortCriterion) throws RemoteInvocationException { return searchAcrossEntities( - entities, input, filter, start, count, searchFlags, sortCriterion, authentication, null); + opContext, entities, input, filter, start, count, sortCriterion, null); } /** @@ -577,19 +587,20 @@ public SearchResult searchAcrossEntities( * @return Snapshot key * @throws RemoteInvocationException */ + @Override @Nonnull public SearchResult searchAcrossEntities( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String input, @Nullable Filter filter, int start, int count, - @Nullable SearchFlags searchFlags, @Nullable SortCriterion sortCriterion, - @Nonnull final Authentication authentication, @Nullable List facets) throws RemoteInvocationException { + SearchFlags searchFlags = opContext.getSearchContext().getSearchFlags(); final EntitiesDoSearchAcrossEntitiesRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS .actionSearchAcrossEntities() @@ -611,21 +622,21 @@ public SearchResult searchAcrossEntities( requestBuilder.sortParam(sortCriterion); } - return sendClientRequest(requestBuilder, authentication).getEntity(); + return sendClientRequest(requestBuilder, opContext.getAuthentication()).getEntity(); } @Nonnull @Override public ScrollResult scrollAcrossEntities( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String input, @Nullable Filter filter, @Nullable String scrollId, @Nullable String keepAlive, - int count, - @Nullable SearchFlags searchFlags, - @Nonnull Authentication authentication) + int count) throws RemoteInvocationException { + final SearchFlags searchFlags = opContext.getSearchContext().getSearchFlags(); final EntitiesDoScrollAcrossEntitiesRequestBuilder requestBuilder = 
ENTITIES_REQUEST_BUILDERS.actionScrollAcrossEntities().inputParam(input).countParam(count); @@ -645,12 +656,13 @@ public ScrollResult scrollAcrossEntities( requestBuilder.keepAliveParam(keepAlive); } - return sendClientRequest(requestBuilder, authentication).getEntity(); + return sendClientRequest(requestBuilder, opContext.getAuthentication()).getEntity(); } @Nonnull @Override public LineageSearchResult searchAcrossLineage( + @Nonnull OperationContext opContext, @Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, @Nonnull List entities, @@ -659,9 +671,7 @@ public LineageSearchResult searchAcrossLineage( @Nullable Filter filter, @Nullable SortCriterion sortCriterion, int start, - int count, - @Nullable SearchFlags searchFlags, - @Nonnull final Authentication authentication) + int count) throws RemoteInvocationException { final EntitiesDoSearchAcrossLineageRequestBuilder requestBuilder = @@ -679,16 +689,15 @@ public LineageSearchResult searchAcrossLineage( if (filter != null) { requestBuilder.filterParam(filter); } - if (searchFlags != null) { - requestBuilder.searchFlagsParam(searchFlags); - } + requestBuilder.searchFlagsParam(opContext.getSearchContext().getSearchFlags()); - return sendClientRequest(requestBuilder, authentication).getEntity(); + return sendClientRequest(requestBuilder, opContext.getAuthentication()).getEntity(); } @Nonnull @Override public LineageSearchResult searchAcrossLineage( + @Nonnull OperationContext opContext, @Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, @Nonnull List entities, @@ -699,9 +708,7 @@ public LineageSearchResult searchAcrossLineage( int start, int count, @Nullable final Long startTimeMillis, - @Nullable final Long endTimeMillis, - @Nullable SearchFlags searchFlags, - @Nonnull final Authentication authentication) + @Nullable final Long endTimeMillis) throws RemoteInvocationException { final EntitiesDoSearchAcrossLineageRequestBuilder requestBuilder = @@ -725,15 +732,14 @@ public LineageSearchResult 
searchAcrossLineage( if (endTimeMillis != null) { requestBuilder.endTimeMillisParam(endTimeMillis); } - if (searchFlags != null) { - requestBuilder.searchFlagsParam(searchFlags); - } + requestBuilder.searchFlagsParam(opContext.getSearchContext().getSearchFlags()); - return sendClientRequest(requestBuilder, authentication).getEntity(); + return sendClientRequest(requestBuilder, opContext.getAuthentication()).getEntity(); } @Override public LineageScrollResult scrollAcrossLineage( + @Nonnull OperationContext opContext, @Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, @Nonnull List entities, @@ -745,9 +751,7 @@ public LineageScrollResult scrollAcrossLineage( @Nonnull String keepAlive, int count, @Nullable final Long startTimeMillis, - @Nullable final Long endTimeMillis, - @Nullable final SearchFlags searchFlags, - @Nonnull final Authentication authentication) + @Nullable final Long endTimeMillis) throws RemoteInvocationException { final EntitiesDoScrollAcrossLineageRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS @@ -773,11 +777,9 @@ public LineageScrollResult scrollAcrossLineage( if (endTimeMillis != null) { requestBuilder.endTimeMillisParam(endTimeMillis); } - if (searchFlags != null) { - requestBuilder.searchFlagsParam(searchFlags); - } + requestBuilder.searchFlagsParam(opContext.getSearchContext().getSearchFlags()); - return sendClientRequest(requestBuilder, authentication).getEntity(); + return sendClientRequest(requestBuilder, opContext.getAuthentication()).getEntity(); } /** @@ -802,18 +804,20 @@ public void setWritable(boolean canWrite, @Nonnull final Authentication authenti sendClientRequest(requestBuilder, authentication); } + @Override @Nonnull public Map batchGetTotalEntityCount( - @Nonnull List entityName, @Nonnull final Authentication authentication) + @Nonnull OperationContext opContext, @Nonnull List entityName) throws RemoteInvocationException { EntitiesDoBatchGetTotalEntityCountRequestBuilder requestBuilder = 
ENTITIES_REQUEST_BUILDERS .actionBatchGetTotalEntityCount() .entitiesParam(new StringArray(entityName)); - return sendClientRequest(requestBuilder, authentication).getEntity(); + return sendClientRequest(requestBuilder, opContext.getAuthentication()).getEntity(); } /** List all urns existing for a particular Entity type. */ + @Override public ListUrnsResult listUrns( @Nonnull final String entityName, final int start, @@ -849,12 +853,12 @@ public void deleteEntityReferences(@Nonnull Urn urn, @Nonnull Authentication aut @Nonnull @Override public SearchResult filter( + @Nonnull OperationContext opContext, @Nonnull String entity, @Nonnull Filter filter, @Nullable SortCriterion sortCriterion, int start, - int count, - @Nonnull final Authentication authentication) + int count) throws RemoteInvocationException { EntitiesDoFilterRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS @@ -866,7 +870,7 @@ public SearchResult filter( if (sortCriterion != null) { requestBuilder.sortParam(sortCriterion); } - return sendClientRequest(requestBuilder, authentication).getEntity(); + return sendClientRequest(requestBuilder, opContext.getAuthentication()).getEntity(); } @Nonnull @@ -885,6 +889,7 @@ public boolean exists(@Nonnull Urn urn, @Nonnull final Authentication authentica * @return list of paths given urn * @throws RemoteInvocationException on remote request error. */ + @Override @Nonnull public VersionedAspect getAspect( @Nonnull String urn, @@ -906,6 +911,7 @@ public VersionedAspect getAspect( * @return list of paths given urn * @throws RemoteInvocationException on remote request error. */ + @Override @Nullable public VersionedAspect getAspectOrNull( @Nonnull String urn, @@ -940,6 +946,7 @@ public VersionedAspect getAspectOrNull( * @return the list of EnvelopedAspect values satisfying the input parameters. * @throws RemoteInvocationException on remote request error. 
*/ + @Override @Nonnull public List getTimeseriesAspectValues( @Nonnull String urn, @@ -1002,6 +1009,7 @@ public String ingestProposal( return sendClientRequest(requestBuilder, authentication).getEntity(); } + @Override public Optional getVersionedAspect( @Nonnull String urn, @Nonnull String aspect, diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java index 243e8a40bf4b7..72af56b8c7fc7 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java @@ -5,16 +5,14 @@ import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; -import com.linkedin.metadata.query.SearchFlags; -import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.search.ScrollResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.PlatformEvent; import com.linkedin.r2.RemoteInvocationException; +import io.datahubproject.metadata.context.OperationContext; import java.net.URISyntaxException; -import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -23,65 +21,43 @@ public interface SystemEntityClient extends EntityClient { EntityClientCache getEntityClientCache(); - Authentication getSystemAuthentication(); + @Nonnull + ConcurrentHashMap getOperationContextMap(); - /** - * Searches for entities matching to a given query and filters across multiple entity types - * - * @param entities entity types to search (if empty, searches all entities) - * @param input search query - * @param filter search filters - * @param scrollId opaque scroll ID 
indicating offset - * @param keepAlive string representation of time to keep point in time alive, ex: 5m - * @param count max number of search results requested - * @return Snapshot key - * @throws RemoteInvocationException - */ @Nonnull - default ScrollResult scrollAcrossEntities( - @Nonnull List entities, - @Nonnull String input, - @Nullable Filter filter, - @Nullable String scrollId, - @Nullable String keepAlive, - int count, - @Nullable SearchFlags searchFlags) - throws RemoteInvocationException { - return scrollAcrossEntities( - entities, - input, - filter, - scrollId, - keepAlive, - count, - searchFlags, - getSystemAuthentication()); + OperationContext getSystemOperationContext(); + + default Authentication getSystemAuthentication() { + return getSystemOperationContext().getAuthentication(); } /** * Builds the cache * - * @param systemAuthentication system authentication * @param cacheConfig cache configuration * @return the cache */ default EntityClientCache buildEntityClientCache( - Class metricClazz, - Authentication systemAuthentication, - EntityClientCacheConfig cacheConfig) { + Class metricClazz, EntityClientCacheConfig cacheConfig) { return EntityClientCache.builder() .config(cacheConfig) .loadFunction( - (Set urns, Set aspectNames) -> { + (EntityClientCache.CollectionKey collectionKey) -> { try { - String entityName = urns.stream().findFirst().map(Urn::getEntityType).get(); + String entityName = + collectionKey.getUrns().stream().findFirst().map(Urn::getEntityType).get(); - if (urns.stream().anyMatch(urn -> !urn.getEntityType().equals(entityName))) { + if (collectionKey.getUrns().stream() + .anyMatch(urn -> !urn.getEntityType().equals(entityName))) { throw new IllegalArgumentException( "Urns must be of the same entity type. 
RestliEntityClient API limitation."); } - return batchGetV2(entityName, urns, aspectNames, systemAuthentication); + return batchGetV2( + entityName, + collectionKey.getUrns(), + collectionKey.getAspectNames(), + getSystemOperationContext().getAuthentication()); } catch (RemoteInvocationException | URISyntaxException e) { throw new RuntimeException(e); } @@ -101,7 +77,7 @@ default EntityClientCache buildEntityClientCache( @Nullable default EntityResponse getV2(@Nonnull Urn urn, @Nonnull Set aspectNames) throws RemoteInvocationException, URISyntaxException { - return getEntityClientCache().getV2(urn, aspectNames); + return getEntityClientCache().getV2(getSystemOperationContext(), urn, aspectNames); } /** @@ -115,7 +91,7 @@ default EntityResponse getV2(@Nonnull Urn urn, @Nonnull Set aspectNames) default Map batchGetV2( @Nonnull Set urns, @Nonnull Set aspectNames) throws RemoteInvocationException, URISyntaxException { - return getEntityClientCache().batchGetV2(urns, aspectNames); + return getEntityClientCache().batchGetV2(getSystemOperationContext(), urns, aspectNames); } default void producePlatformEvent( diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java index 0f179c4da7b74..7aad31b5beeba 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java @@ -1,9 +1,10 @@ package com.linkedin.entity.client; -import com.datahub.authentication.Authentication; import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; import com.linkedin.parseq.retry.backoff.BackoffPolicy; import com.linkedin.restli.client.Client; +import io.datahubproject.metadata.context.OperationContext; +import java.util.concurrent.ConcurrentHashMap; import 
javax.annotation.Nonnull; import lombok.Getter; @@ -11,17 +12,18 @@ @Getter public class SystemRestliEntityClient extends RestliEntityClient implements SystemEntityClient { private final EntityClientCache entityClientCache; - private final Authentication systemAuthentication; + private final OperationContext systemOperationContext; + private final ConcurrentHashMap operationContextMap; public SystemRestliEntityClient( + @Nonnull OperationContext systemOperationContext, @Nonnull final Client restliClient, @Nonnull final BackoffPolicy backoffPolicy, int retryCount, - @Nonnull Authentication systemAuthentication, EntityClientCacheConfig cacheConfig) { super(restliClient, backoffPolicy, retryCount); - this.systemAuthentication = systemAuthentication; - this.entityClientCache = - buildEntityClientCache(SystemRestliEntityClient.class, systemAuthentication, cacheConfig); + this.operationContextMap = new ConcurrentHashMap<>(); + this.systemOperationContext = systemOperationContext; + this.entityClientCache = buildEntityClientCache(SystemRestliEntityClient.class, cacheConfig); } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java index 747e1e0e1a288..461c2e50fac54 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java @@ -8,6 +8,7 @@ import com.linkedin.parseq.retry.backoff.BackoffPolicy; import com.linkedin.r2.RemoteInvocationException; import com.linkedin.restli.client.Client; +import io.datahubproject.metadata.context.OperationContext; import java.net.URISyntaxException; import javax.annotation.Nonnull; @@ -16,22 +17,27 @@ public class UsageClient extends BaseClient { private static final UsageStatsRequestBuilders USAGE_STATS_REQUEST_BUILDERS = new UsageStatsRequestBuilders(); + private final OperationContext 
systemOperationContext; private final UsageClientCache usageClientCache; public UsageClient( + @Nonnull OperationContext systemOperationContext, @Nonnull final Client restliClient, @Nonnull final BackoffPolicy backoffPolicy, int retryCount, - Authentication systemAuthentication, UsageClientCacheConfig cacheConfig) { super(restliClient, backoffPolicy, retryCount); + this.systemOperationContext = systemOperationContext; this.usageClientCache = UsageClientCache.builder() .config(cacheConfig) .loadFunction( - (String resource, UsageTimeRange range) -> { + (UsageClientCache.Key cacheKey) -> { try { - return getUsageStats(resource, range, systemAuthentication); + return getUsageStats( + cacheKey.getResource(), + cacheKey.getRange(), + systemOperationContext.getAuthentication()); } catch (RemoteInvocationException | URISyntaxException e) { throw new RuntimeException(e); } @@ -45,7 +51,7 @@ public UsageClient( */ @Nonnull public UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range) { - return usageClientCache.getUsageStats(resource, range); + return usageClientCache.getUsageStats(systemOperationContext, resource, range); } /** Gets a specific version of downstream {@link EntityRelationships} for the given dataset. 
*/ diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java index b56b59d0feec0..1d12f0395e3f6 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java @@ -4,6 +4,7 @@ import com.github.benmanes.caffeine.cache.Weigher; import com.linkedin.common.client.ClientCache; import com.linkedin.metadata.config.cache.client.UsageClientCacheConfig; +import io.datahubproject.metadata.context.OperationContext; import java.util.Map; import java.util.function.BiFunction; import java.util.function.Function; @@ -18,13 +19,22 @@ public class UsageClientCache { @NonNull private UsageClientCacheConfig config; @NonNull private final ClientCache cache; - @NonNull private BiFunction loadFunction; + @NonNull private Function loadFunction; - public UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range) { + public UsageQueryResult getUsageStats( + @Nonnull OperationContext opContext, + @Nonnull String resource, + @Nonnull UsageTimeRange range) { + Key cacheKey = + Key.builder() + .contextId(opContext.getEntityContextId()) + .resource(resource) + .range(range) + .build(); if (config.isEnabled()) { - return cache.get(Key.builder().resource(resource).range(range).build()); + return cache.get(cacheKey); } else { - return loadFunction.apply(resource, range); + return loadFunction.apply(cacheKey); } } @@ -43,7 +53,7 @@ public UsageClientCache build() { Function, Map> loader = (Iterable keys) -> StreamSupport.stream(keys.spliterator(), false) - .map(k -> Map.entry(k, loadFunction.apply(k.getResource(), k.getRange()))) + .map(k -> Map.entry(k, loadFunction.apply(k))) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); // default ttl only @@ -64,7 +74,8 @@ public UsageClientCache build() { 
@Data @Builder - protected static class Key { + public static class Key { + private final String contextId; private final String resource; private final UsageTimeRange range; } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java index afdaf06802a11..27620e0b49d0c 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java @@ -12,6 +12,8 @@ import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.EntitySpec; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.context.OperationContextConfig; import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -159,6 +161,10 @@ public class EntityResource extends CollectionResourceTaskTemplate search( throw new RestLiServiceException( HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } + + OperationContext opContext = OperationContext.asSession(systemOperationContext, _authorizer, auth, true); + log.info("GET SEARCH RESULTS for {} with query {}", entityName, input); // TODO - change it to use _searchService once we are confident on it's latency return RestliUtil.toTask( @@ -376,8 +385,8 @@ public Task search( final SearchResult result; // This API is not used by the frontend for search bars so we default to structured result = - _entitySearchService.search( - List.of(entityName), input, filter, sortCriterion, start, count, searchFlags); + _entitySearchService.search(opContext, + List.of(entityName), input, filter, sortCriterion, start, count); 
return validateSearchResult(result, _entityService); }, MetricRegistry.name(this.getClass(), "search")); @@ -404,15 +413,17 @@ public Task searchAcrossEntities( throw new RestLiServiceException( HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } + OperationContext opContext = OperationContext.asSession( + systemOperationContext, _authorizer, auth, true) + .withSearchFlags(flags -> searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true)); + List entityList = entities == null ? Collections.emptyList() : Arrays.asList(entities); log.info("GET SEARCH RESULTS ACROSS ENTITIES for {} with query {}", entityList, input); - final SearchFlags finalFlags = - searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true); return RestliUtil.toTask( () -> validateSearchResult( - _searchService.searchAcrossEntities( - entityList, input, filter, sortCriterion, start, count, finalFlags), + _searchService.searchAcrossEntities(opContext, + entityList, input, filter, sortCriterion, start, count), _entityService), "searchAcrossEntities"); } @@ -429,26 +440,30 @@ public Task scrollAcrossEntities( @ActionParam(PARAM_KEEP_ALIVE) String keepAlive, @ActionParam(PARAM_COUNT) int count, @ActionParam(PARAM_SEARCH_FLAGS) @Optional SearchFlags searchFlags) { + Authentication auth = AuthenticationContext.getAuthentication(); + OperationContext opContext = OperationContext.asSession( + systemOperationContext, _authorizer, auth, true) + .withSearchFlags(flags -> searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true)); + List entityList = entities == null ? Collections.emptyList() : Arrays.asList(entities); log.info( "GET SCROLL RESULTS ACROSS ENTITIES for {} with query {} and scroll ID: {}", entityList, input, scrollId); - final SearchFlags finalFlags = - searchFlags != null ? 
searchFlags : new SearchFlags().setFulltext(true); + return RestliUtil.toTask( () -> validateScrollResult( _searchService.scrollAcrossEntities( + opContext, entityList, input, filter, sortCriterion, scrollId, keepAlive, - count, - finalFlags), + count), _entityService), "scrollAcrossEntities"); } @@ -470,7 +485,12 @@ public Task searchAcrossLineage( @ActionParam(PARAM_END_TIME_MILLIS) @Optional @Nullable Long endTimeMillis, @Optional @Nullable @ActionParam(PARAM_SEARCH_FLAGS) SearchFlags searchFlags) throws URISyntaxException { + Authentication auth = AuthenticationContext.getAuthentication(); + OperationContext opContext = OperationContext.asSession( + systemOperationContext, _authorizer, auth, true) + .withSearchFlags(flags -> searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true)); + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) && !isAuthorized( auth, @@ -492,6 +512,7 @@ public Task searchAcrossLineage( () -> validateLineageSearchResult( _lineageSearchService.searchAcrossLineage( + opContext, urn, LineageDirection.valueOf(direction), entityList, @@ -502,8 +523,7 @@ public Task searchAcrossLineage( start, count, startTimeMillis, - endTimeMillis, - searchFlags), + endTimeMillis), _entityService), "searchAcrossRelationships"); } @@ -526,6 +546,12 @@ public Task scrollAcrossLineage( @ActionParam(PARAM_END_TIME_MILLIS) @Optional @Nullable Long endTimeMillis, @ActionParam(PARAM_SEARCH_FLAGS) @Optional @Nullable SearchFlags searchFlags) throws URISyntaxException { + + Authentication auth = AuthenticationContext.getAuthentication(); + OperationContext opContext = OperationContext.asSession( + systemOperationContext, _authorizer, auth, true) + .withSearchFlags(flags -> searchFlags != null ? searchFlags : new SearchFlags().setSkipCache(true)); + Urn urn = Urn.createFromString(urnStr); List entityList = entities == null ? 
Collections.emptyList() : Arrays.asList(entities); log.info( @@ -534,12 +560,12 @@ public Task scrollAcrossLineage( direction, entityList, input); - final SearchFlags finalFlags = - searchFlags != null ? searchFlags : new SearchFlags().setSkipCache(true); + return RestliUtil.toTask( () -> validateLineageScrollResult( _lineageSearchService.scrollAcrossLineage( + opContext, urn, LineageDirection.valueOf(direction), entityList, @@ -551,8 +577,7 @@ public Task scrollAcrossLineage( keepAlive, count, startTimeMillis, - endTimeMillis, - finalFlags), + endTimeMillis), _entityService), "scrollAcrossLineage"); } @@ -577,12 +602,16 @@ public Task list( throw new RestLiServiceException( HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } + OperationContext opContext = OperationContext.asSession( + systemOperationContext, _authorizer, auth, true) + .withSearchFlags(flags -> new SearchFlags().setFulltext(false)); + log.info("GET LIST RESULTS for {} with filter {}", entityName, filter); return RestliUtil.toTask( () -> validateListResult( toListResult( - _entitySearchService.filter(entityName, filter, sortCriterion, start, count)), + _entitySearchService.filter(opContext, entityName, filter, sortCriterion, start, count)), _entityService), MetricRegistry.name(this.getClass(), "filter")); } @@ -595,7 +624,8 @@ public Task autocomplete( @ActionParam(PARAM_QUERY) @Nonnull String query, @ActionParam(PARAM_FIELD) @Optional @Nullable String field, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, - @ActionParam(PARAM_LIMIT) int limit) { + @ActionParam(PARAM_LIMIT) int limit, + @ActionParam(PARAM_SEARCH_FLAGS) @Optional @Nullable SearchFlags searchFlags) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) @@ -607,8 +637,12 @@ public Task autocomplete( throw new RestLiServiceException( HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } + OperationContext 
opContext = OperationContext.asSession( + systemOperationContext, _authorizer, auth, true) + .withSearchFlags(flags -> searchFlags != null ? searchFlags : flags); + return RestliUtil.toTask( - () -> _entitySearchService.autoComplete(entityName, query, field, filter, limit), + () -> _entitySearchService.autoComplete(opContext, entityName, query, field, filter, limit), MetricRegistry.name(this.getClass(), "autocomplete")); } @@ -620,7 +654,8 @@ public Task browse( @ActionParam(PARAM_PATH) @Nonnull String path, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, @ActionParam(PARAM_START) int start, - @ActionParam(PARAM_LIMIT) int limit) { + @ActionParam(PARAM_LIMIT) int limit, + @ActionParam(PARAM_SEARCH_FLAGS) @Optional @Nullable SearchFlags searchFlags) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) @@ -632,11 +667,15 @@ public Task browse( throw new RestLiServiceException( HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } + OperationContext opContext = OperationContext.asSession( + systemOperationContext, _authorizer, auth, true) + .withSearchFlags(flags -> searchFlags != null ? 
searchFlags : flags); + log.info("GET BROWSE RESULTS for {} at path {}", entityName, path); return RestliUtil.toTask( () -> validateBrowseResult( - _entitySearchService.browse(entityName, path, filter, start, limit), + _entitySearchService.browse(opContext, entityName, path, filter, start, limit), _entityService), MetricRegistry.name(this.getClass(), "browse")); } @@ -937,7 +976,9 @@ public Task getTotalEntityCount(@ActionParam(PARAM_ENTITY) @Nonnull String throw new RestLiServiceException( HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity counts."); } - return RestliUtil.toTask(() -> _entitySearchService.docCount(entityName)); + OperationContext opContext = OperationContext.asSession( + systemOperationContext, _authorizer, auth, true); + return RestliUtil.toTask(() -> _entitySearchService.docCount(opContext, entityName)); } @Action(name = "batchGetTotalEntityCount") @@ -955,8 +996,10 @@ public Task batchGetTotalEntityCount( throw new RestLiServiceException( HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity counts."); } + OperationContext opContext = OperationContext.asSession( + systemOperationContext, _authorizer, auth, true); return RestliUtil.toTask( - () -> new LongMap(_searchService.docCountPerEntity(Arrays.asList(entityNames)))); + () -> new LongMap(_searchService.docCountPerEntity(opContext, Arrays.asList(entityNames)))); } @Action(name = ACTION_LIST_URNS) @@ -1030,11 +1073,13 @@ public Task filter( throw new RestLiServiceException( HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } + OperationContext opContext = OperationContext.asSession( + systemOperationContext, _authorizer, auth, true); log.info("FILTER RESULTS for {} with filter {}", entityName, filter); return RestliUtil.toTask( () -> validateSearchResult( - _entitySearchService.filter(entityName, filter, sortCriterion, start, count), + _entitySearchService.filter(opContext.withSearchFlags(flags -> flags.setFulltext(true)), entityName, filter, 
sortCriterion, start, count), _entityService), MetricRegistry.name(this.getClass(), "search")); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java index 55373730e7b67..915756fc8da35 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java @@ -2,7 +2,6 @@ import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; -import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; import com.linkedin.entity.Aspect; @@ -19,7 +18,6 @@ import java.util.Set; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import org.joda.time.DateTimeUtils; @Slf4j public class AspectUtils { @@ -70,13 +68,6 @@ public static MetadataChangeProposal buildMetadataChangeProposal( return proposal; } - public static AuditStamp getAuditStamp(Urn actor) { - AuditStamp auditStamp = new AuditStamp(); - auditStamp.setTime(DateTimeUtils.currentTimeMillis()); - auditStamp.setActor(actor); - return auditStamp; - } - public static AspectSpec validateAspect(MetadataChangeLog mcl, EntitySpec entitySpec) { if (!mcl.hasAspectName() || (!ChangeType.DELETE.equals(mcl.getChangeType()) && !mcl.hasAspect())) { diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/RecommendationsService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/RecommendationsService.java index 5676dc9ebac54..fcea114446c49 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/RecommendationsService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/RecommendationsService.java @@ -1,9 +1,9 @@ package 
com.linkedin.metadata.recommendation; -import com.linkedin.common.urn.Urn; import com.linkedin.metadata.recommendation.candidatesource.RecommendationSource; import com.linkedin.metadata.recommendation.ranker.RecommendationModuleRanker; import com.linkedin.metadata.utils.ConcurrencyUtils; +import io.datahubproject.metadata.context.OperationContext; import io.opentelemetry.extension.annotations.WithSpan; import java.util.List; import java.util.Map; @@ -47,7 +47,7 @@ private void validateRecommendationSources(final List cand /** * Return the list of recommendation modules given input context * - * @param userUrn User requesting recommendations + * @param opContext User's context requesting recommendations * @param requestContext Context of where the recommendations are being requested * @param limit Max number of modules to return * @return List of recommendation modules @@ -55,14 +55,17 @@ private void validateRecommendationSources(final List cand @Nonnull @WithSpan public List listRecommendations( - @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext, int limit) { + @Nonnull OperationContext opContext, + @Nonnull RecommendationRequestContext requestContext, + int limit) { + // Get recommendation candidates from sources which are eligible, in parallel final List candidateModules = ConcurrencyUtils.transformAndCollectAsync( _candidateSources.stream() - .filter(source -> source.isEligible(userUrn, requestContext)) + .filter(source -> source.isEligible(opContext, requestContext)) .collect(Collectors.toList()), - source -> source.getRecommendationModule(userUrn, requestContext), + source -> source.getRecommendationModule(opContext, requestContext), (source, exception) -> { log.error( "Error while fetching candidate modules from source {}", source, exception); @@ -74,6 +77,6 @@ public List listRecommendations( .collect(Collectors.toList()); // Rank recommendation modules, which determines their ordering during rendering - return 
_moduleRanker.rank(candidateModules, userUrn, requestContext, limit); + return _moduleRanker.rank(opContext, requestContext, candidateModules, limit); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java index e34fa8ff1bde5..dd2bdfa57dfbf 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java @@ -1,11 +1,11 @@ package com.linkedin.metadata.recommendation.candidatesource; -import com.linkedin.common.urn.Urn; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.recommendation.RecommendationRenderType; import com.linkedin.metadata.recommendation.RecommendationRequestContext; import com.linkedin.metadata.recommendation.ScenarioType; import com.linkedin.metadata.search.EntitySearchService; +import io.datahubproject.metadata.context.OperationContext; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; @@ -36,7 +36,7 @@ public RecommendationRenderType getRenderType() { @Override public boolean isEligible( - @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + @Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) { return requestContext.getScenario() == ScenarioType.HOME; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java index 8d6ccb22660fb..ece3a4e177b3c 100644 --- 
a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java @@ -13,6 +13,7 @@ import com.linkedin.metadata.recommendation.SearchParams; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.utils.QueryUtils; +import io.datahubproject.metadata.context.OperationContext; import io.opentelemetry.extension.annotations.WithSpan; import java.net.URISyntaxException; import java.util.Collections; @@ -71,10 +72,10 @@ protected boolean isValidCandidate(T candidate) { @Override @WithSpan public List getRecommendations( - @Nonnull Urn userUrn, @Nullable RecommendationRequestContext requestContext) { + @Nonnull OperationContext opContext, @Nullable RecommendationRequestContext requestContext) { Map aggregationResult = entitySearchService.aggregateByValue( - getEntityNames(entityRegistry), getSearchFieldName(), null, getMaxContent()); + opContext, getEntityNames(entityRegistry), getSearchFieldName(), null, getMaxContent()); if (aggregationResult.isEmpty()) { return Collections.emptyList(); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlySearchedSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlySearchedSource.java index e133e3dc75ff3..96b266f88406e 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlySearchedSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlySearchedSource.java @@ -13,6 +13,7 @@ import com.linkedin.metadata.recommendation.SearchParams; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.metrics.MetricUtils; +import 
io.datahubproject.metadata.context.OperationContext; import java.io.IOException; import java.util.List; import java.util.Optional; @@ -60,7 +61,7 @@ public RecommendationRenderType getRenderType() { @Override public boolean isEligible( - @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + @Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) { boolean analyticsEnabled = false; try { analyticsEnabled = @@ -77,8 +78,8 @@ public boolean isEligible( @Override public List getRecommendations( - @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { - SearchRequest searchRequest = buildSearchRequest(userUrn); + @Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) { + SearchRequest searchRequest = buildSearchRequest(opContext.getActorContext().getActorUrn()); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getRecentlySearched").time()) { final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationSource.java index 788ef728e294f..95c5df64ed2f2 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationSource.java @@ -1,11 +1,11 @@ package com.linkedin.metadata.recommendation.candidatesource; -import com.linkedin.common.urn.Urn; import com.linkedin.metadata.recommendation.RecommendationContent; import com.linkedin.metadata.recommendation.RecommendationContentArray; import com.linkedin.metadata.recommendation.RecommendationModule; import 
com.linkedin.metadata.recommendation.RecommendationRenderType; import com.linkedin.metadata.recommendation.RecommendationRequestContext; +import io.datahubproject.metadata.context.OperationContext; import io.opentelemetry.extension.annotations.WithSpan; import java.util.List; import java.util.Optional; @@ -26,37 +26,38 @@ public interface RecommendationSource { /** * Whether or not this module is eligible for resolution given the context * - * @param userUrn User requesting recommendations + * @param opContext User's context requesting recommendations * @param requestContext Context of where the recommendations are being requested * @return whether this source is eligible */ - boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext); + boolean isEligible( + @Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext); /** * Get recommended items (candidates / content) provided the context * - * @param userUrn User requesting recommendations + * @param opContext User's context requesting recommendations * @param requestContext Context of where the recommendations are being requested * @return list of recommendation candidates */ @WithSpan List getRecommendations( - @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext); + @Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext); /** * Get the full recommendations module itself provided the request context. 
* - * @param userUrn User requesting recommendations + * @param opContext User's context requesting recommendations * @param requestContext Context of where the recommendations are being requested * @return list of recommendation candidates */ default Optional getRecommendationModule( - @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { - if (!isEligible(userUrn, requestContext)) { + @Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) { + if (!isEligible(opContext, requestContext)) { return Optional.empty(); } - List recommendations = getRecommendations(userUrn, requestContext); + List recommendations = getRecommendations(opContext, requestContext); if (recommendations.isEmpty()) { return Optional.empty(); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtils.java index 1fa47d1a13645..2c1b183b9344b 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtils.java @@ -1,6 +1,6 @@ package com.linkedin.metadata.recommendation.candidatesource; -import com.linkedin.common.urn.Urn; +import io.datahubproject.metadata.context.OperationContext; import java.util.Set; import javax.annotation.Nonnull; @@ -14,8 +14,8 @@ public class RecommendationUtils { * @return true if the type of the urn is in the set of valid entity types, false otherwise. 
*/ public static boolean isSupportedEntityType( - @Nonnull final Urn urn, @Nonnull final Set entityTypes) { - final String entityType = urn.getEntityType(); + @Nonnull OperationContext opContext, @Nonnull final Set entityTypes) { + final String entityType = opContext.getActorContext().getActorUrn().getEntityType(); return entityTypes.contains(entityType); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java index 1a5f1ff4b2ca4..f43f5894a09f7 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java @@ -11,6 +11,7 @@ import com.linkedin.metadata.recommendation.RecommendationRequestContext; import com.linkedin.metadata.recommendation.ScenarioType; import com.linkedin.metadata.search.EntitySearchService; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; @@ -64,7 +65,7 @@ public RecommendationRenderType getRenderType() { @Override public boolean isEligible( - @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + @Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) { return requestContext.getScenario() == ScenarioType.HOME; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java index 0897d441335fa..1eb00fa8eae30 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java +++ 
b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java @@ -1,11 +1,11 @@ package com.linkedin.metadata.recommendation.candidatesource; -import com.linkedin.common.urn.Urn; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.recommendation.RecommendationRenderType; import com.linkedin.metadata.recommendation.RecommendationRequestContext; import com.linkedin.metadata.recommendation.ScenarioType; import com.linkedin.metadata.search.EntitySearchService; +import io.datahubproject.metadata.context.OperationContext; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; @@ -35,7 +35,7 @@ public RecommendationRenderType getRenderType() { @Override public boolean isEligible( - @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + @Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) { return requestContext.getScenario() == ScenarioType.HOME || requestContext.getScenario() == ScenarioType.SEARCH_RESULTS; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java index 0fab9a28b51ea..2909a56d26368 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java @@ -1,11 +1,11 @@ package com.linkedin.metadata.recommendation.candidatesource; -import com.linkedin.common.urn.Urn; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.recommendation.RecommendationRenderType; import com.linkedin.metadata.recommendation.RecommendationRequestContext; import com.linkedin.metadata.recommendation.ScenarioType; import 
com.linkedin.metadata.search.EntitySearchService; +import io.datahubproject.metadata.context.OperationContext; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; @@ -35,7 +35,7 @@ public RecommendationRenderType getRenderType() { @Override public boolean isEligible( - @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + @Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) { return requestContext.getScenario() == ScenarioType.HOME || requestContext.getScenario() == ScenarioType.SEARCH_RESULTS; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/RecommendationModuleRanker.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/RecommendationModuleRanker.java index f09f83fd6ec25..ab736cf4b2521 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/RecommendationModuleRanker.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/RecommendationModuleRanker.java @@ -1,8 +1,8 @@ package com.linkedin.metadata.recommendation.ranker; -import com.linkedin.common.urn.Urn; import com.linkedin.metadata.recommendation.RecommendationModule; import com.linkedin.metadata.recommendation.RecommendationRequestContext; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; import javax.annotation.Nonnull; @@ -10,15 +10,15 @@ public interface RecommendationModuleRanker { /** * Rank and return the final list of modules * + * @param opContext the user's context * @param candidates Candidate modules to rank - * @param userUrn User requesting recommendations * @param requestContext Context of where the recommendations are being requested * @param limit Max number of modules to return * @return ranked list of modules */ List rank( - @Nonnull List candidates, - @Nonnull Urn userUrn, + @Nonnull OperationContext opContext, 
@Nonnull RecommendationRequestContext requestContext, + @Nonnull List candidates, int limit); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/SimpleRecommendationRanker.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/SimpleRecommendationRanker.java index 13bc5af91c9e9..4599b42d88e4b 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/SimpleRecommendationRanker.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/SimpleRecommendationRanker.java @@ -1,8 +1,8 @@ package com.linkedin.metadata.recommendation.ranker; -import com.linkedin.common.urn.Urn; import com.linkedin.metadata.recommendation.RecommendationModule; import com.linkedin.metadata.recommendation.RecommendationRequestContext; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -10,9 +10,9 @@ public class SimpleRecommendationRanker implements RecommendationModuleRanker { @Override public List rank( - @Nonnull List candidates, - @Nonnull Urn userUrn, + @Nonnull OperationContext opContext, @Nullable RecommendationRequestContext requestContext, + @Nonnull List candidates, int limit) { return candidates.subList(0, Math.min(candidates.size(), limit)); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java index 03b5c7f5547e7..01b109e7cd924 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java @@ -5,9 +5,9 @@ import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.browse.BrowseResultV2; import 
com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; import java.util.Map; import javax.annotation.Nonnull; @@ -33,7 +33,7 @@ public interface EntitySearchService { * * @param entityName name of the entity */ - long docCount(@Nonnull String entityName); + long docCount(@Nonnull OperationContext opContext, @Nonnull String entityName); /** * Updates or inserts the given search document. @@ -76,19 +76,18 @@ public interface EntitySearchService { * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return - * @param searchFlags flags controlling search options * @return a {@link SearchResult} that contains a list of matched documents and related search * result metadata */ @Nonnull SearchResult search( + @Nonnull OperationContext opContext, @Nonnull List entityNames, @Nonnull String input, @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, int from, - int size, - @Nullable SearchFlags searchFlags); + int size); /** * Gets a list of documents that match given search request. 
The results are aggregated and @@ -105,20 +104,19 @@ SearchResult search( * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return - * @param searchFlags flags controlling search options * @param facets list of facets we want aggregations for * @return a {@link SearchResult} that contains a list of matched documents and related search * result metadata */ @Nonnull SearchResult search( + @Nonnull OperationContext opContext, @Nonnull List entityNames, @Nonnull String input, @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, int from, int size, - @Nullable SearchFlags searchFlags, @Nullable List facets); /** @@ -135,6 +133,7 @@ SearchResult search( */ @Nonnull SearchResult filter( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nullable Filter filters, @Nullable SortCriterion sortCriterion, @@ -156,6 +155,7 @@ SearchResult filter( */ @Nonnull AutoCompleteResult autoComplete( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nonnull String query, @Nullable String field, @@ -174,6 +174,7 @@ AutoCompleteResult autoComplete( */ @Nonnull Map aggregateByValue( + @Nonnull OperationContext opContext, @Nullable List entityNames, @Nonnull String field, @Nullable Filter requestParams, @@ -191,6 +192,7 @@ Map aggregateByValue( */ @Nonnull BrowseResult browse( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nonnull String path, @Nullable Filter requestParams, @@ -206,17 +208,16 @@ BrowseResult browse( * @param input search query * @param start start offset of first group * @param count max number of results requested - * @param searchFlags configuration options for search */ @Nonnull - public BrowseResultV2 browseV2( + BrowseResultV2 browseV2( + @Nonnull OperationContext opContext, @Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, - 
int count, - @Nullable SearchFlags searchFlags); + int count); /** * Gets browse snapshot of a given path @@ -227,17 +228,16 @@ public BrowseResultV2 browseV2( * @param input search query * @param start start offset of first group * @param count max number of results requested - * @param searchFlags configuration options for search */ @Nonnull - public BrowseResultV2 browseV2( + BrowseResultV2 browseV2( + @Nonnull OperationContext opContext, @Nonnull List entityNames, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, - int count, - @Nullable SearchFlags searchFlags); + int count); /** * Gets a list of paths for a given urn. @@ -260,20 +260,19 @@ public BrowseResultV2 browseV2( * @param sortCriterion {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll identifier to pass to search service * @param size the number of search hits to return - * @param searchFlags flags controlling search options * @return a {@link ScrollResult} that contains a list of matched documents and related search * result metadata */ @Nonnull ScrollResult fullTextScroll( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, - int size, - @Nullable SearchFlags searchFlags); + int size); /** * Gets a list of documents that match given search request. 
The results are aggregated and @@ -286,31 +285,30 @@ ScrollResult fullTextScroll( * @param sortCriterion {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll identifier to pass to search service * @param size the number of search hits to return - * @param searchFlags flags controlling search options * @return a {@link ScrollResult} that contains a list of matched documents and related search * result metadata */ @Nonnull ScrollResult structuredScroll( + @Nonnull OperationContext opContext, @Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, - int size, - @Nullable SearchFlags searchFlags); + int size); /** Max result size returned by the underlying search backend */ int maxResultSize(); ExplainResponse explain( + @Nonnull OperationContext opContext, @Nonnull String query, @Nonnull String documentId, @Nonnull String entityName, @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, - @Nullable SearchFlags searchFlags, @Nullable String scrollId, @Nullable String keepAlive, int size, diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/FormService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/FormService.java index 59d40b29e7383..f8b9aa2ea6d64 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/FormService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/FormService.java @@ -44,6 +44,7 @@ import com.linkedin.structured.StructuredProperties; import com.linkedin.structured.StructuredPropertyValueAssignment; import com.linkedin.structured.StructuredPropertyValueAssignmentArray; +import io.datahubproject.metadata.context.OperationContext; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.List; @@ -67,10 +68,12 @@ public class FormService extends BaseService 
{ private static final int BATCH_FORM_ENTITY_COUNT = 500; + private final OperationContext systemOpContext; + public FormService( - @Nonnull final EntityClient entityClient, - @Nonnull final Authentication systemAuthentication) { - super(entityClient, systemAuthentication); + @Nonnull OperationContext systemOpContext, @Nonnull final EntityClient entityClient) { + super(entityClient, systemOpContext.getAuthentication()); + this.systemOpContext = systemOpContext; } /** Batch associated a form to a given set of entities by urn. */ @@ -162,7 +165,7 @@ public void upsertFormAssignmentRunner( @Nonnull final Urn formUrn, @Nonnull final DynamicFormAssignment formFilters) { try { SearchBasedFormAssignmentRunner.assign( - formFilters, formUrn, BATCH_FORM_ENTITY_COUNT, entityClient, systemAuthentication); + systemOpContext, formFilters, formUrn, BATCH_FORM_ENTITY_COUNT, entityClient); } catch (Exception e) { throw new RuntimeException( String.format("Failed to dynamically assign form with urn: %s", formUrn), e); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/LineageService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/LineageService.java index cd5202ce75b64..7cbdde5959b36 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/LineageService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/LineageService.java @@ -1,6 +1,7 @@ package com.linkedin.metadata.service; import static com.linkedin.metadata.entity.AspectUtils.*; +import static com.linkedin.metadata.utils.AuditStampUtils.getAuditStamp; import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/RestrictedService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/RestrictedService.java new file mode 100644 index 0000000000000..d1aa8e9f5dbb5 --- 
/dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/RestrictedService.java @@ -0,0 +1,29 @@ +package com.linkedin.metadata.service; + +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.secret.SecretService; +import javax.annotation.Nonnull; + +public class RestrictedService { + + private final SecretService _secretService; + + public RestrictedService(@Nonnull SecretService secretService) { + this._secretService = secretService; + } + + public Urn encryptRestrictedUrn(@Nonnull final Urn entityUrn) { + final String encryptedEntityUrn = this._secretService.encrypt(entityUrn.toString()); + try { + return new Urn("restricted", encryptedEntityUrn); + } catch (Exception e) { + throw new RuntimeException("Error when creating restricted entity urn", e); + } + } + + public Urn decryptRestrictedUrn(@Nonnull final Urn restrictedUrn) { + final String encryptedUrn = restrictedUrn.getId(); + return UrnUtils.getUrn(this._secretService.decrypt(encryptedUrn)); + } +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentManager.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentManager.java index 73e3bc130ac9d..9c29e2a41a633 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentManager.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentManager.java @@ -1,6 +1,5 @@ package com.linkedin.metadata.service.util; -import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.entity.client.EntityClient; @@ -10,6 +9,7 @@ import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.service.FormService; import com.linkedin.r2.RemoteInvocationException; +import 
io.datahubproject.metadata.context.OperationContext; import java.util.List; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; @@ -21,31 +21,30 @@ public class SearchBasedFormAssignmentManager { ImmutableList.of(Constants.DATASET_ENTITY_NAME); public static void apply( + OperationContext opContext, DynamicFormAssignment formFilters, Urn formUrn, int batchFormEntityCount, - EntityClient entityClient, - Authentication authentication) + EntityClient entityClient) throws Exception { try { int totalResults = 0; int numResults = 0; String scrollId = null; - FormService formService = new FormService(entityClient, authentication); + FormService formService = new FormService(opContext, entityClient); do { ScrollResult results = entityClient.scrollAcrossEntities( + opContext, ENTITY_TYPES, "*", formFilters.getFilter(), scrollId, "5m", - batchFormEntityCount, - null, - authentication); + batchFormEntityCount); if (!results.hasEntities() || results.getNumEntities() == 0 diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentRunner.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentRunner.java index a20f71f550c65..f24307c30793d 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentRunner.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentRunner.java @@ -1,27 +1,27 @@ package com.linkedin.metadata.service.util; -import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.entity.client.EntityClient; import com.linkedin.form.DynamicFormAssignment; +import io.datahubproject.metadata.context.OperationContext; import lombok.extern.slf4j.Slf4j; @Slf4j public class SearchBasedFormAssignmentRunner { public static void assign( + OperationContext opContext, DynamicFormAssignment formFilters, Urn 
formUrn, int batchFormEntityCount, - EntityClient entityClient, - Authentication authentication) { + EntityClient entityClient) { Runnable runnable = new Runnable() { @Override public void run() { try { SearchBasedFormAssignmentManager.apply( - formFilters, formUrn, batchFormEntityCount, entityClient, authentication); + opContext, formFilters, formUrn, batchFormEntityCount, entityClient); } catch (Exception e) { log.error( "SearchBasedFormAssignmentRunner failed to run. " diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/RestrictedServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/RestrictedServiceTest.java new file mode 100644 index 0000000000000..239e56cfd97a8 --- /dev/null +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/RestrictedServiceTest.java @@ -0,0 +1,37 @@ +package com.linkedin.metadata.service; + +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.secret.SecretService; +import org.mockito.Mockito; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class RestrictedServiceTest { + + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz1)"); + private static final String ENCRYPED_DATASET_URN = "12d3as456tgs"; + private static final Urn TEST_RESTRICTED_URN = + UrnUtils.getUrn(String.format("urn:li:restricted:%s", ENCRYPED_DATASET_URN)); + + @Test + private void testEncryptRestrictedUrn() throws Exception { + SecretService mockSecretService = Mockito.mock(SecretService.class); + Mockito.when(mockSecretService.encrypt(TEST_DATASET_URN.toString())) + .thenReturn(ENCRYPED_DATASET_URN); + final RestrictedService service = new RestrictedService(mockSecretService); + + Assert.assertEquals(service.encryptRestrictedUrn(TEST_DATASET_URN), TEST_RESTRICTED_URN); + } + + @Test + private void testDecryptRestrictedUrn() throws Exception { + 
SecretService mockSecretService = Mockito.mock(SecretService.class); + Mockito.when(mockSecretService.decrypt(ENCRYPED_DATASET_URN)) + .thenReturn(TEST_DATASET_URN.toString()); + final RestrictedService service = new RestrictedService(mockSecretService); + + Assert.assertEquals(service.decryptRestrictedUrn(TEST_RESTRICTED_URN), TEST_DATASET_URN); + } +} diff --git a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java index afaeb9c81039b..ffa0d600a2351 100644 --- a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java +++ b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java @@ -10,8 +10,8 @@ import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.elasticsearch.query.request.SearchRequestHandler; +import io.datahubproject.metadata.context.OperationContext; import jakarta.servlet.http.HttpServlet; import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletResponse; @@ -43,6 +43,10 @@ private AspectRetriever getAspectRetriever(WebApplicationContext ctx) { return (AspectRetriever) ctx.getBean("aspectRetriever"); } + private OperationContext getOperationContext(WebApplicationContext ctx) { + return (OperationContext) ctx.getBean("systemOperationContext"); + } + private void writeSearchCsv(WebApplicationContext ctx, PrintWriter pw) { SearchConfiguration searchConfiguration = getConfigProvider(ctx).getElasticSearch().getSearch(); AspectRetriever aspectRetriever = getAspectRetriever(ctx); @@ -84,15 +88,18 @@ private void writeSearchCsv(WebApplicationContext ctx, PrintWriter pw) { SearchRequestHandler.getBuilder( entitySpec, searchConfiguration, null, 
aspectRetriever) .getSearchRequest( + getOperationContext(ctx) + .withSearchFlags( + flags -> + flags + .setFulltext(true) + .setSkipHighlighting(true) + .setSkipAggregates(true)), "*", null, null, 0, 0, - new SearchFlags() - .setFulltext(true) - .setSkipHighlighting(true) - .setSkipAggregates(true), null); FunctionScoreQueryBuilder rankingQuery = diff --git a/metadata-service/war/src/main/resources/boot/policies.json b/metadata-service/war/src/main/resources/boot/policies.json index 66a5de48790c2..b89ee970c875f 100644 --- a/metadata-service/war/src/main/resources/boot/policies.json +++ b/metadata-service/war/src/main/resources/boot/policies.json @@ -55,6 +55,7 @@ "privileges":[ "EDIT_ENTITY", "VIEW_ENTITY_PAGE", + "VIEW_ENTITY", "EDIT_LINEAGE", "EDIT_ENTITY_ASSERTIONS", "SEARCH_PRIVILEGE", @@ -106,6 +107,7 @@ }, "privileges":[ "VIEW_ENTITY_PAGE", + "VIEW_ENTITY", "SEARCH_PRIVILEGE", "GET_COUNTS_PRIVILEGE", "GET_TIMESERIES_ASPECT_PRIVILEGE", @@ -199,6 +201,7 @@ }, "privileges":[ "VIEW_ENTITY_PAGE", + "VIEW_ENTITY", "EDIT_ENTITY_TAGS", "EDIT_ENTITY_GLOSSARY_TERMS", "EDIT_ENTITY_OWNERS", @@ -281,6 +284,7 @@ }, "privileges":[ "VIEW_ENTITY_PAGE", + "VIEW_ENTITY", "EDIT_ENTITY_TAGS", "EDIT_ENTITY_GLOSSARY_TERMS", "EDIT_ENTITY_DOCS", @@ -399,6 +403,7 @@ }, "privileges":[ "VIEW_ENTITY_PAGE", + "VIEW_ENTITY", "VIEW_DATASET_USAGE", "VIEW_DATASET_PROFILE", "SEARCH_PRIVILEGE", @@ -427,6 +432,7 @@ }, "privileges":[ "VIEW_ENTITY_PAGE", + "VIEW_ENTITY", "EDIT_ENTITY_TAGS", "EDIT_ENTITY_GLOSSARY_TERMS", "EDIT_ENTITY_OWNERS", diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java index 6889d56d4ebc2..d6d2d24109874 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java @@ -3,6 +3,7 @@ import 
com.google.common.collect.ImmutableList; import java.util.Collection; import java.util.List; +import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; import lombok.AllArgsConstructor; @@ -139,6 +140,19 @@ public class PoliciesConfig { public static final Privilege VIEW_ENTITY_PAGE_PRIVILEGE = Privilege.of("VIEW_ENTITY_PAGE", "View Entity Page", "The ability to view the entity page."); + public static final Privilege VIEW_ENTITY_PRIVILEGE = + Privilege.of( + "VIEW_ENTITY", "View Entity", "The ability to view the entity in search results."); + + /* + These two privileges are logically the same for search for now. + In the future, we might allow search but not the entity page view. + */ + public static final Set VIEW_ENTITY_PRIVILEGES = + Set.of( + PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType(), + PoliciesConfig.VIEW_ENTITY_PAGE_PRIVILEGE.getType()); + public static final Privilege EDIT_ENTITY_TAGS_PRIVILEGE = Privilege.of( "EDIT_ENTITY_TAGS", "Edit Tags", "The ability to add and remove tags to an asset."); @@ -242,6 +256,7 @@ public class PoliciesConfig { EDIT_ENTITY_DEPRECATION_PRIVILEGE, EDIT_ENTITY_PRIVILEGE, DELETE_ENTITY_PRIVILEGE, + VIEW_ENTITY_PRIVILEGE, EDIT_ENTITY_INCIDENTS_PRIVILEGE); // Dataset Privileges diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java index 6ba311cf166d4..404dd001353a6 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java @@ -5,25 +5,31 @@ import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; -import java.net.URISyntaxException; -import java.time.Clock; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; +import org.joda.time.DateTimeUtils; @Slf4j public 
class AuditStampUtils { private AuditStampUtils() {} public static AuditStamp createDefaultAuditStamp() { - return new AuditStamp() - .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) - .setTime(Clock.systemUTC().millis()); + return getAuditStamp(UrnUtils.getUrn(SYSTEM_ACTOR)); } - public static AuditStamp createAuditStamp(@Nonnull String actorUrn) throws URISyntaxException { + public static AuditStamp createAuditStamp(@Nonnull String actorUrn) { + return getAuditStamp(UrnUtils.getUrn(actorUrn)); + } + + public static AuditStamp getAuditStamp(Urn actor) { + return getAuditStamp(actor, null); + } + + public static AuditStamp getAuditStamp(@Nonnull Urn actor, @Nullable Long currentTimeMs) { AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(Urn.createFromString(actorUrn)); - auditStamp.setTime(Clock.systemUTC().millis()); + auditStamp.setTime(currentTimeMs != null ? currentTimeMs : DateTimeUtils.currentTimeMillis()); + auditStamp.setActor(actor); return auditStamp; } } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java index 9df708c6e9fdc..c3c9cac6280ed 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java @@ -23,8 +23,6 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import org.opensearch.index.query.BoolQueryBuilder; -import org.opensearch.index.query.QueryBuilders; @Slf4j public class SearchUtil { @@ -33,9 +31,9 @@ public class SearchUtil { public static final String AGGREGATION_SPECIAL_TYPE_DELIMITER = "␝"; public static final String MISSING_SPECIAL_TYPE = "missing"; public static final String INDEX_VIRTUAL_FIELD = "_entityType"; + public static final String ES_INDEX_FIELD = "_index"; public static final String KEYWORD_SUFFIX = ".keyword"; private static final String URN_PREFIX = 
"urn:"; - private static final String REMOVED = "removed"; private SearchUtil() {} @@ -73,7 +71,7 @@ public static FilterValue createFilterValue(String value, Long facetCount, Boole private static Criterion transformEntityTypeCriterion( Criterion criterion, IndexConvention indexConvention) { return criterion - .setField("_index") + .setField(ES_INDEX_FIELD) .setValues( new StringArray( criterion.getValues().stream() @@ -124,30 +122,6 @@ public static Filter transformFilterForEntities( return filter; } - /** - * Applies a default filter to remove entities that are soft deleted only if there isn't a filter - * for the REMOVED field already - */ - public static BoolQueryBuilder filterSoftDeletedByDefault( - @Nullable Filter filter, @Nullable BoolQueryBuilder filterQuery) { - boolean removedInOrFilter = false; - if (filter != null) { - removedInOrFilter = - filter.getOr().stream() - .anyMatch( - or -> - or.getAnd().stream() - .anyMatch( - criterion -> - criterion.getField().equals(REMOVED) - || criterion.getField().equals(REMOVED + KEYWORD_SUFFIX))); - } - if (!removedInOrFilter) { - filterQuery.mustNot(QueryBuilders.matchQuery(REMOVED, true)); - } - return filterQuery; - } - public static SortCriterion sortBy(@Nonnull String field, @Nullable SortOrder direction) { SortCriterion sortCriterion = new SortCriterion(); sortCriterion.setField(field); diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java index 764630eb73973..47801cd2054fa 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java @@ -12,6 +12,8 @@ // Default implementation of search index naming convention public class IndexConventionImpl implements IndexConvention { + public static final IndexConvention 
NO_PREFIX = new IndexConventionImpl(null); + // Map from Entity name -> Index name private final Map indexNameMapping = new ConcurrentHashMap<>(); private final Optional _prefix; diff --git a/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java b/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java index f3e52c9989775..8074f344cd244 100644 --- a/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java +++ b/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java @@ -10,7 +10,7 @@ public class IndexConventionImplTest { @Test public void testIndexConventionNoPrefix() { - IndexConvention indexConventionNoPrefix = new IndexConventionImpl(null); + IndexConvention indexConventionNoPrefix = IndexConventionImpl.NO_PREFIX; String entityName = "dataset"; String expectedIndexName = "datasetindex_v2"; assertEquals(indexConventionNoPrefix.getEntityIndexName(entityName), expectedIndexName); @@ -42,7 +42,7 @@ public void testIndexConventionPrefix() { @Test public void testTimeseriesIndexConventionNoPrefix() { - IndexConvention indexConventionNoPrefix = new IndexConventionImpl(null); + IndexConvention indexConventionNoPrefix = IndexConventionImpl.NO_PREFIX; String entityName = "dataset"; String aspectName = "datasetusagestatistics"; String expectedIndexName = "dataset_datasetusagestatisticsaspect_v1"; diff --git a/settings.gradle b/settings.gradle index 4614b6ed4ccaf..e58c1c851c8f1 100644 --- a/settings.gradle +++ b/settings.gradle @@ -64,3 +64,4 @@ include 'mock-entity-registry' include 'metadata-service:services' include 'metadata-service:configuration' include ':metadata-jobs:common' +include ':metadata-operation-context' diff --git a/smoke-test/cypress-dev.sh b/smoke-test/cypress-dev.sh index b1c6571e1a065..0359c6eb547e6 100755 --- a/smoke-test/cypress-dev.sh +++ b/smoke-test/cypress-dev.sh @@ -10,6 
+10,8 @@ fi source venv/bin/activate +export KAFKA_BROKER_CONTAINER="datahub-kafka-broker-1" +export KAFKA_BOOTSTRAP_SERVER="broker:9092" python -c 'from tests.cypress.integration_test import ingest_data; ingest_data()' cd tests/cypress diff --git a/smoke-test/tests/cli/user_groups_cmd/test_group_cmd.py b/smoke-test/tests/cli/user_groups_cmd/test_group_cmd.py index feabcc5f9d655..d5e1ade663dff 100644 --- a/smoke-test/tests/cli/user_groups_cmd/test_group_cmd.py +++ b/smoke-test/tests/cli/user_groups_cmd/test_group_cmd.py @@ -106,6 +106,11 @@ def test_group_upsert(wait_for_healthchecks: Any) -> None: "owners": [ {"owner": "urn:li:corpuser:user1", "type": "TECHNICAL_OWNER"} ], + "ownerTypes": { + "urn:li:ownershipType:__system__none": [ + "urn:li:corpuser:user1", + ], + }, }, "status": {"removed": False}, } diff --git a/smoke-test/tests/consistency_utils.py b/smoke-test/tests/consistency_utils.py index 5ffc642d49469..4335e2a874c1e 100644 --- a/smoke-test/tests/consistency_utils.py +++ b/smoke-test/tests/consistency_utils.py @@ -8,6 +8,10 @@ ELASTICSEARCH_REFRESH_INTERVAL_SECONDS: int = int( os.getenv("ELASTICSEARCH_REFRESH_INTERVAL_SECONDS", 5) ) +KAFKA_BROKER_CONTAINER: str = str( + os.getenv("KAFKA_BROKER_CONTAINER", "datahub-broker-1") +) +KAFKA_BOOTSTRAP_SERVER: str = str(os.getenv("KAFKA_BOOTSTRAP_SERVER", "broker:29092")) logger = logging.getLogger(__name__) @@ -21,19 +25,26 @@ def wait_for_writes_to_sync(max_timeout_in_sec: int = 120) -> None: lag_zero = False while not lag_zero and (time.time() - start_time) < max_timeout_in_sec: time.sleep(1) # micro-sleep - completed_process = subprocess.run( - "docker exec datahub-broker-1 /bin/kafka-consumer-groups --bootstrap-server broker:29092 --group generic-mae-consumer-job-client --describe | grep -v LAG | awk '{print $6}'", - capture_output=True, - shell=True, - text=True, - ) - result = str(completed_process.stdout) - lines = result.splitlines() - lag_values = [int(line) for line in lines if line != ""] - 
maximum_lag = max(lag_values) if lag_values else 0 - if maximum_lag == 0: - lag_zero = True + cmd = ( + f"docker exec {KAFKA_BROKER_CONTAINER} /bin/kafka-consumer-groups --bootstrap-server {KAFKA_BOOTSTRAP_SERVER} --group generic-mae-consumer-job-client --describe | grep -v LAG " + + "| awk '{print $6}'" + ) + try: + completed_process = subprocess.run( + cmd, + capture_output=True, + shell=True, + text=True, + ) + result = str(completed_process.stdout) + lines = result.splitlines() + lag_values = [int(line) for line in lines if line != ""] + maximum_lag = max(lag_values) + if maximum_lag == 0: + lag_zero = True + except ValueError: + logger.warning(f"Error reading kafka lag using command: {cmd}") if not lag_zero: logger.warning(