
Commit

Merge branch 'master' into cus2240-lookml-ingestion-fails
sid-acryl authored Jul 26, 2024
2 parents e31f7d8 + 01b3461 commit dd02704
Showing 188 changed files with 2,363 additions and 905 deletions.
3 changes: 2 additions & 1 deletion build.gradle
@@ -54,7 +54,7 @@ buildscript {
ext.hazelcastVersion = '5.3.6'
ext.ebeanVersion = '12.16.1'
ext.googleJavaFormatVersion = '1.18.1'
ext.openLineageVersion = '1.16.0'
ext.openLineageVersion = '1.19.0'
ext.logbackClassicJava8 = '1.2.12'

ext.docker_registry = 'acryldata'
@@ -111,6 +111,7 @@ project.ext.externalDependency = [
'avroCompiler': 'org.apache.avro:avro-compiler:1.11.3',
'awsGlueSchemaRegistrySerde': 'software.amazon.glue:schema-registry-serde:1.1.17',
'awsMskIamAuth': 'software.amazon.msk:aws-msk-iam-auth:2.0.3',
'awsS3': 'software.amazon.awssdk:s3:2.26.21',
'awsSecretsManagerJdbc': 'com.amazonaws.secretsmanager:aws-secretsmanager-jdbc:1.0.13',
'awsPostgresIamAuth': 'software.amazon.jdbc:aws-advanced-jdbc-wrapper:1.0.2',
'awsRds':'software.amazon.awssdk:rds:2.18.24',
@@ -1869,7 +1869,9 @@ private void configureCorpGroupResolvers(final RuntimeWiring.Builder builder) {
"CorpGroup",
typeWiring ->
typeWiring
.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
.dataFetcher(
"relationships",
new EntityRelationshipsResultResolver(graphClient, entityService))
.dataFetcher("privileges", new EntityPrivilegesResolver(entityClient))
.dataFetcher(
"aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))
@@ -5,18 +5,21 @@

import com.linkedin.common.EntityRelationship;
import com.linkedin.common.EntityRelationships;
import com.linkedin.common.urn.Urn;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils;
import com.linkedin.datahub.graphql.generated.Entity;
import com.linkedin.datahub.graphql.generated.EntityRelationshipsResult;
import com.linkedin.datahub.graphql.generated.RelationshipsInput;
import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper;
import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.graph.GraphClient;
import com.linkedin.metadata.query.filter.RelationshipDirection;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
@@ -29,8 +32,16 @@ public class EntityRelationshipsResultResolver

private final GraphClient _graphClient;

private final EntityService _entityService;

public EntityRelationshipsResultResolver(final GraphClient graphClient) {
this(graphClient, null);
}

public EntityRelationshipsResultResolver(
final GraphClient graphClient, final EntityService entityService) {
_graphClient = graphClient;
_entityService = entityService;
}

@Override
@@ -47,13 +58,16 @@ public CompletableFuture<EntityRelationshipsResult> get(DataFetchingEnvironment
final Integer count = input.getCount(); // Optional!
final RelationshipDirection resolvedDirection =
RelationshipDirection.valueOf(relationshipDirection.toString());
final boolean includeSoftDelete = input.getIncludeSoftDelete();

return GraphQLConcurrencyUtils.supplyAsync(
() ->
mapEntityRelationships(
context,
fetchEntityRelationships(
urn, relationshipTypes, resolvedDirection, start, count, context.getActorUrn()),
resolvedDirection),
resolvedDirection,
includeSoftDelete),
this.getClass().getSimpleName(),
"get");
}
@@ -72,13 +86,28 @@ private EntityRelationships fetchEntityRelationships(
private EntityRelationshipsResult mapEntityRelationships(
@Nullable final QueryContext context,
final EntityRelationships entityRelationships,
final RelationshipDirection relationshipDirection) {
final RelationshipDirection relationshipDirection,
final boolean includeSoftDelete) {
final EntityRelationshipsResult result = new EntityRelationshipsResult();

final Set<Urn> existentUrns;
if (context != null && _entityService != null && !includeSoftDelete) {
Set<Urn> allRelatedUrns =
entityRelationships.getRelationships().stream()
.map(EntityRelationship::getEntity)
.collect(Collectors.toSet());
existentUrns = _entityService.exists(context.getOperationContext(), allRelatedUrns, false);
} else {
existentUrns = null;
}

List<EntityRelationship> viewable =
entityRelationships.getRelationships().stream()
.filter(
rel -> context == null || canView(context.getOperationContext(), rel.getEntity()))
rel ->
(existentUrns == null || existentUrns.contains(rel.getEntity()))
&& (context == null
|| canView(context.getOperationContext(), rel.getEntity())))
.collect(Collectors.toList());

result.setStart(entityRelationships.getStart());
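Read together, the resolver hunks above wire an EntityService into EntityRelationshipsResultResolver and use it to drop soft-deleted related entities when includeSoftDelete is false. The following is a minimal sketch of that filtering path in one place, not the literal class: it assumes the members shown in the diff (_entityService, the canView helper, context.getOperationContext()), and the extracted filterViewableRelationships helper is hypothetical.

  // Minimal consolidation of the new filtering path (names taken from the hunks above;
  // the extracted helper name is hypothetical).
  private List<EntityRelationship> filterViewableRelationships(
      @Nullable final QueryContext context,
      final EntityRelationships entityRelationships,
      final boolean includeSoftDelete) {

    // When soft-deleted entities should be hidden and an EntityService is available,
    // ask the entity service which related urns still exist (the third argument false
    // excludes soft-deleted entities from the returned "exists" set).
    final Set<Urn> existentUrns;
    if (context != null && _entityService != null && !includeSoftDelete) {
      Set<Urn> allRelatedUrns =
          entityRelationships.getRelationships().stream()
              .map(EntityRelationship::getEntity)
              .collect(Collectors.toSet());
      existentUrns = _entityService.exists(context.getOperationContext(), allRelatedUrns, false);
    } else {
      existentUrns = null; // no existence filtering: soft-deleted entities are kept
    }

    // Keep a relationship only if its target survived the existence check and is viewable.
    return entityRelationships.getRelationships().stream()
        .filter(
            rel ->
                (existentUrns == null || existentUrns.contains(rel.getEntity()))
                    && (context == null
                        || canView(context.getOperationContext(), rel.getEntity())))
        .collect(Collectors.toList());
  }

Because the GraphQL input defaults includeSoftDelete to true (see entity.graphql below), existing callers keep their current behavior, while the datahub-web-react group queries opt into false explicitly.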
5 changes: 5 additions & 0 deletions datahub-graphql-core/src/main/resources/entity.graphql
@@ -1267,6 +1267,11 @@ input RelationshipsInput {
The number of results to be returned
"""
count: Int

"""
Whether to include soft-deleted, related, entities
"""
includeSoftDelete: Boolean = true
}

"""
@@ -0,0 +1,124 @@
package com.linkedin.datahub.graphql.resolvers.load;

import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.testng.Assert.assertEquals;

import com.linkedin.common.EntityRelationship;
import com.linkedin.common.EntityRelationshipArray;
import com.linkedin.common.EntityRelationships;
import com.linkedin.common.urn.Urn;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.generated.*;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.graph.GraphClient;
import graphql.schema.DataFetchingEnvironment;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

public class EntityRelationshipsResultResolverTest {
private final Urn existentUser = Urn.createFromString("urn:li:corpuser:johndoe");
private final Urn softDeletedUser = Urn.createFromString("urn:li:corpuser:deletedUser");

private CorpUser existentEntity;
private CorpUser softDeletedEntity;

private EntityService _entityService;
private GraphClient _graphClient;

private EntityRelationshipsResultResolver resolver;
private RelationshipsInput input;
private DataFetchingEnvironment mockEnv;

public EntityRelationshipsResultResolverTest() throws URISyntaxException {}

@BeforeMethod
public void setupTest() {
_entityService = mock(EntityService.class);
_graphClient = mock(GraphClient.class);
resolver = new EntityRelationshipsResultResolver(_graphClient, _entityService);

mockEnv = mock(DataFetchingEnvironment.class);
QueryContext context = getMockAllowContext();
when(mockEnv.getContext()).thenReturn(context);

CorpGroup source = new CorpGroup();
source.setUrn("urn:li:corpGroup:group1");
when(mockEnv.getSource()).thenReturn(source);

when(_entityService.exists(any(), eq(Set.of(existentUser, softDeletedUser)), eq(true)))
.thenReturn(Set.of(existentUser, softDeletedUser));
when(_entityService.exists(any(), eq(Set.of(existentUser, softDeletedUser)), eq(false)))
.thenReturn(Set.of(existentUser));

input = new RelationshipsInput();
input.setStart(0);
input.setCount(10);
input.setDirection(RelationshipDirection.INCOMING);
input.setTypes(List.of("SomeType"));

EntityRelationships entityRelationships =
new EntityRelationships()
.setStart(0)
.setCount(2)
.setTotal(2)
.setRelationships(
new EntityRelationshipArray(
new EntityRelationship().setEntity(existentUser).setType("SomeType"),
new EntityRelationship().setEntity(softDeletedUser).setType("SomeType")));

// always expected INCOMING, and "SomeType" in all tests
when(_graphClient.getRelatedEntities(
eq(source.getUrn()),
eq(input.getTypes()),
same(com.linkedin.metadata.query.filter.RelationshipDirection.INCOMING),
eq(input.getStart()),
eq(input.getCount()),
any()))
.thenReturn(entityRelationships);

when(mockEnv.getArgument(eq("input"))).thenReturn(input);

existentEntity = new CorpUser();
existentEntity.setUrn(existentUser.toString());
existentEntity.setType(EntityType.CORP_USER);

softDeletedEntity = new CorpUser();
softDeletedEntity.setUrn(softDeletedUser.toString());
softDeletedEntity.setType(EntityType.CORP_USER);
}

@Test
public void testIncludeSoftDeleted() throws ExecutionException, InterruptedException {
EntityRelationshipsResult expected = new EntityRelationshipsResult();
expected.setRelationships(
List.of(resultRelationship(existentEntity), resultRelationship(softDeletedEntity)));
expected.setStart(0);
expected.setCount(2);
expected.setTotal(2);
assertEquals(resolver.get(mockEnv).get().toString(), expected.toString());
}

@Test
public void testExcludeSoftDeleted() throws ExecutionException, InterruptedException {
input.setIncludeSoftDelete(false);
EntityRelationshipsResult expected = new EntityRelationshipsResult();
expected.setRelationships(List.of(resultRelationship(existentEntity)));
expected.setStart(0);
expected.setCount(1);
expected.setTotal(1);
assertEquals(resolver.get(mockEnv).get().toString(), expected.toString());
}

private com.linkedin.datahub.graphql.generated.EntityRelationship resultRelationship(
Entity entity) {
return new com.linkedin.datahub.graphql.generated.EntityRelationship(
"SomeType", RelationshipDirection.INCOMING, entity, null);
}
}
30 changes: 27 additions & 3 deletions datahub-web-react/src/graphql/group.graphql
@@ -48,6 +48,7 @@ query getGroup($urn: String!, $membersCount: Int!) {
direction: INCOMING
start: 0
count: $membersCount
includeSoftDelete: false
}
) {
start
@@ -86,6 +87,7 @@ query getAllGroupMembers($urn: String!, $start: Int!, $count: Int!) {
direction: INCOMING
start: $start
count: $count
includeSoftDelete: false
}
) {
start
@@ -121,7 +123,15 @@ query getAllGroupMembers($urn: String!, $start: Int!, $count: Int!) {

query getGroupMembers($urn: String!, $start: Int!, $count: Int!) {
corpGroup(urn: $urn) {
relationships(input: { types: ["IsMemberOfGroup"], direction: INCOMING, start: $start, count: $count }) {
relationships(
input: {
types: ["IsMemberOfGroup"]
direction: INCOMING
start: $start
count: $count
includeSoftDelete: false
}
) {
start
count
total
@@ -155,7 +165,15 @@ query getGroupMembers($urn: String!, $start: Int!, $count: Int!) {

query getNativeGroupMembers($urn: String!, $start: Int!, $count: Int!) {
corpGroup(urn: $urn) {
relationships(input: { types: ["IsMemberOfNativeGroup"], direction: INCOMING, start: $start, count: $count }) {
relationships(
input: {
types: ["IsMemberOfNativeGroup"]
direction: INCOMING
start: $start
count: $count
includeSoftDelete: false
}
) {
start
count
total
@@ -209,7 +227,13 @@ query listGroups($input: ListGroupsInput!) {
pictureLink
}
memberCount: relationships(
input: { types: ["IsMemberOfGroup", "IsMemberOfNativeGroup"], direction: INCOMING, start: 0, count: 1 }
input: {
types: ["IsMemberOfGroup", "IsMemberOfNativeGroup"]
direction: INCOMING
start: 0
count: 1
includeSoftDelete: false
}
) {
total
}
2 changes: 1 addition & 1 deletion docs-website/filterTagIndexes.json
@@ -562,7 +562,7 @@
}
},
{
"Path": "docs/metadata-integration/java/spark-lineage-beta",
"Path": "docs/metadata-integration/java/acryl-spark-lineage",
"imgPath": "img/logos/platforms/spark.svg",
"Title": "Spark",
"Description": "Spark is a data processing tool that enables fast and efficient processing of large-scale data sets using distributed computing.",
11 changes: 4 additions & 7 deletions docs-website/sidebars.js
@@ -281,6 +281,7 @@ module.exports = {
},
{
"Managed DataHub Release History": [
"docs/managed-datahub/release-notes/v_0_3_4",
"docs/managed-datahub/release-notes/v_0_3_3",
"docs/managed-datahub/release-notes/v_0_3_2",
"docs/managed-datahub/release-notes/v_0_3_1",
@@ -419,17 +420,13 @@ module.exports = {
},
{
type: "doc",
id: "metadata-integration/java/spark-lineage/README",
label: "Spark (Legacy)",
},
{
type: "doc",
id: "metadata-integration/java/spark-lineage-beta/README",
id: "metadata-integration/java/acryl-spark-lineage/README",
label: "Spark",
},
//"docker/airflow/local_airflow",
"metadata-ingestion/integration_docs/great-expectations",
"metadata-integration/java/datahub-protobuf/README",
//"metadata-integration/java/spark-lineage-legacy/README",
//"metadata-ingestion/source-docs-template",
{
type: "autogenerated",
@@ -886,7 +883,7 @@ module.exports = {
//"docs/how/graph-onboarding",
//"docs/demo/graph-onboarding",
//"metadata-integration/java/spark-lineage/README",
// "metadata-integration/java/spark-lineage-beta/README.md
// "metadata-integration/java/acryl-spark-lineage/README.md
// "metadata-integration/java/openlineage-converter/README"
//"metadata-ingestion-modules/airflow-plugin/README"
//"metadata-ingestion-modules/dagster-plugin/README"
