From cff7a1c5dc7f5d95b3b55a8e7199140799e36e9e Mon Sep 17 00:00:00 2001 From: John Joyce Date: Tue, 23 Jan 2024 12:35:53 -0800 Subject: [PATCH 01/25] PRD-968 and PRD-967: Migrating the PDL models for incidents --- .../common/IncidentSummaryDetails.pdl | 31 +++++++ .../com/linkedin/common/IncidentsSummary.pdl | 90 +++++++++++++++++++ .../com/linkedin/incident/IncidentInfo.pdl | 87 ++++++++++++++++++ .../com/linkedin/incident/IncidentSource.pdl | 42 +++++++++ .../com/linkedin/incident/IncidentStatus.pdl | 42 +++++++++ .../com/linkedin/incident/IncidentType.pdl | 40 +++++++++ .../com/linkedin/metadata/key/IncidentKey.pdl | 14 +++ .../src/main/resources/entity-registry.yml | 11 +++ 8 files changed, 357 insertions(+) create mode 100644 metadata-models/src/main/pegasus/com/linkedin/common/IncidentSummaryDetails.pdl create mode 100644 metadata-models/src/main/pegasus/com/linkedin/common/IncidentsSummary.pdl create mode 100644 metadata-models/src/main/pegasus/com/linkedin/incident/IncidentInfo.pdl create mode 100644 metadata-models/src/main/pegasus/com/linkedin/incident/IncidentSource.pdl create mode 100644 metadata-models/src/main/pegasus/com/linkedin/incident/IncidentStatus.pdl create mode 100644 metadata-models/src/main/pegasus/com/linkedin/incident/IncidentType.pdl create mode 100644 metadata-models/src/main/pegasus/com/linkedin/metadata/key/IncidentKey.pdl diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/IncidentSummaryDetails.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/IncidentSummaryDetails.pdl new file mode 100644 index 0000000000000..630943f1bee81 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/common/IncidentSummaryDetails.pdl @@ -0,0 +1,31 @@ +namespace com.linkedin.common + +/** + * Summary statistics about incidents on an entity. 
+ */ +record IncidentSummaryDetails { + /** + * The urn of the incident + */ + urn: Urn + + /** + * The type of an incident + */ + type: string + + /** + * The time at which the incident was raised in milliseconds since epoch. + */ + createdAt: long + + /** + * The time at which the incident was marked as resolved in milliseconds since epoch. Null if the incident is still active. + */ + resolvedAt: optional long + + /** + * The priority of the incident + */ + priority: optional int +} \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/IncidentsSummary.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/IncidentsSummary.pdl new file mode 100644 index 0000000000000..e1367a326e24b --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/common/IncidentsSummary.pdl @@ -0,0 +1,90 @@ +namespace com.linkedin.common + +/** + * Summary related incidents on an entity. + */ +@Aspect = { + "name": "incidentsSummary" +} +record IncidentsSummary { + /** + * Resolved incidents for an asset + * Deprecated! Use the richer resolvedIncidentsDetails instead. + */ + @deprecated + resolvedIncidents: array[Urn] = [] + + /** + * Active incidents for an asset + * Deprecated! Use the richer activeIncidentsDetails instead. 
+ */ + @deprecated + activeIncidents: array[Urn] = [] + + /** + * Summary details about the set of resolved incidents + */ + @Searchable = { + "/*/urn" : { + "fieldType": "URN", + "fieldName": "resolvedIncidents", + "hasValuesFieldName": "hasResolvedIncidents", + "numValuesFieldName": "numResolvedIncidents" + }, + "/*/type" : { + "fieldType": "KEYWORD", + "fieldName": "resolvedIncidentTypes" + }, + "/*/createdAt" : { + "fieldType": "DATETIME", + "fieldName": "resolvedIncidentCreatedTimes" + }, + "/*/priority" : { + "fieldType": "COUNT", + "fieldName": "resolvedIncidentPriorities" + }, + "/*/resolvedAt" : { + "fieldType": "DATETIME", + "fieldName": "resolvedIncidentResolvedTimes" + } + } + @Relationship = { + "/*/urn" : { + "name": "ResolvedIncidents", + "entityTypes": [ "incident" ] + } + } + resolvedIncidentDetails: array[IncidentSummaryDetails] = [] + + /** + * Summary details about the set of active incidents + */ + @Searchable = { + "/*/urn" : { + "fieldType": "URN", + "fieldName": "activeIncidents", + "hasValuesFieldName": "hasActiveIncidents", + "numValuesFieldName": "numActiveIncidents", + "addHasValuesToFilters": true + }, + "/*/type" : { + "fieldType": "KEYWORD", + "fieldName": "activeIncidentTypes" + }, + "/*/createdAt" : { + "fieldType": "DATETIME", + "fieldName": "activeIncidentCreatedTimes" + }, + "/*/priority" : { + "fieldType": "COUNT", + "fieldName": "activeIncidentPriorities" + } + } + @Relationship = { + "/*/urn" : { + "name": "ActiveIncidents", + "entityTypes": [ "incident" ] + } + } + activeIncidentDetails: array[IncidentSummaryDetails] = [] +} \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentInfo.pdl b/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentInfo.pdl new file mode 100644 index 0000000000000..44baff5270bed --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentInfo.pdl @@ -0,0 +1,87 @@ +namespace com.linkedin.incident + +import 
com.linkedin.common.AuditStamp +import com.linkedin.common.EntityReference +import com.linkedin.common.Urn + +/** + * Information about an incident raised on an asset. + */ +@Aspect = { + "name": "incidentInfo" +} +record IncidentInfo { + /** + * The type of incident + */ + @Searchable = { + "addToFilters": true, + "filterNameOverride": "Type" + } + type: IncidentType + + /** + * An optional custom incident type. Present only if type is 'CUSTOM'. + */ + @Searchable = { + "addToFilters": true, + "filterNameOverride": "Other Type" + } + customType: optional string + + /** + * Optional title associated with the incident + */ + title: optional string + + /** + * Optional description associated with the incident + */ + description: optional string + + /** + * A reference to the entity associated with the incident. + */ + @Relationship = { + "/*": { + "name": "IncidentOn", + "entityTypes": [ "dataset", "chart", "dashboard", "dataFlow", "dataJob", "schemaField" ] + } + } + @Searchable = { + "/*": { + "fieldType": "URN" + } + } + entities: array[Urn] + + /** + * A numeric severity or priority for the incident. On the UI we will translate this into something easy to understand. + */ + @Searchable = { + "addToFilters": true, + "filterNameOverride": "Priority" + } + priority: optional int = 0 + + /** + * The current status of an incident, i.e. active or inactive. + */ + status: IncidentStatus + + /** + * The source of an incident, i.e. how it was generated. 
+ */ + source: optional IncidentSource + + /** + * The time at which the request was initially created + */ + @Searchable = { + "/time": { + "fieldName": "created", + "fieldType": "COUNT" + } + } + created: AuditStamp +} \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentSource.pdl b/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentSource.pdl new file mode 100644 index 0000000000000..26fa7ed2f540a --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentSource.pdl @@ -0,0 +1,42 @@ +namespace com.linkedin.incident + +import com.linkedin.common.AuditStamp +import com.linkedin.common.Urn + +/** + * Information about the source of an incident raised on an asset. + */ +@Aspect = { + "name": "incidentSource" +} +record IncidentSource { + /** + * Message associated with the incident + */ + @Searchable = { + "addToFilters": true, + "filterNameOverride": "Source" + } + type: enum IncidentSourceType { + /** + * Manually created incident, via UI or API. + */ + MANUAL + + /** + * An assertion has failed, triggering the incident. + */ + ASSERTION_FAILURE + } + + /** + * Reference to an URN related to the source of an incident. Currently populated when the type of the incident + * is ASSERTION_FAILURE. + * + * If this incident was raised due to an assertion failure, then this will contain the urn of the source Assertion. 
+ */ + @Searchable = { + "fieldType": "URN" + } + sourceUrn: optional Urn +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentStatus.pdl b/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentStatus.pdl new file mode 100644 index 0000000000000..a3548b3cda520 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentStatus.pdl @@ -0,0 +1,42 @@ +namespace com.linkedin.incident + +import com.linkedin.common.AuditStamp + +/** + * Information about an incident raised on an asset + */ +record IncidentStatus { + /** + * The state of the incident + */ + @Searchable = { + "addToFilters": true, + "filterNameOverride": "Status" + } + state: enum IncidentState { + /** + * The incident is ongoing, or active. + */ + ACTIVE + /** + * The incident is resolved. + */ + RESOLVED + } + + /** + * Optional message associated with the incident + */ + message: optional string + + /** + * The time at which the request was initially created + */ + @Searchable = { + "/time": { + "fieldName": "lastUpdated", + "fieldType": "COUNT" + } + } + lastUpdated: AuditStamp +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentType.pdl b/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentType.pdl new file mode 100644 index 0000000000000..56de7a145f542 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentType.pdl @@ -0,0 +1,40 @@ +namespace com.linkedin.incident + +/** + * A type of asset incident + */ +enum IncidentType { + /** + * An Freshness Assertion has failed, triggering the incident. + * Raised on entities where assertions are configured to generate incidents. + */ + FRESHNESS + + /** + * An Volume Assertion has failed, triggering the incident. + * Raised on entities where assertions are configured to generate incidents. + */ + VOLUME + + /** + * An assertion on a particular column(s) of a Dataset has triggered the incident. 
+ * Raised on Datasets where assertions are configured to generate incidents. + */ + DATASET_COLUMN + + /** + * An assertion on the row count of a Dataset has triggered the incident. + * Raised on datasets where assertions are configured to generate incidents. + */ + DATASET_ROWS + + /** + * A misc. operational incident, e.g. failure to materialize a dataset. + */ + OPERATIONAL + + /** + * A custom type of incident + */ + CUSTOM +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/metadata/key/IncidentKey.pdl b/metadata-models/src/main/pegasus/com/linkedin/metadata/key/IncidentKey.pdl new file mode 100644 index 0000000000000..6fe682a34c2a9 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/metadata/key/IncidentKey.pdl @@ -0,0 +1,14 @@ +namespace com.linkedin.metadata.key + +/** + * Key for an asset Incident + */ +@Aspect = { + "name": "incidentKey" +} +record IncidentKey { + /** + * A unique id for the incident. Generated on the server side at incident creation time. 
+ */ + id: string +} \ No newline at end of file diff --git a/metadata-models/src/main/resources/entity-registry.yml b/metadata-models/src/main/resources/entity-registry.yml index 62eaa2af2d4a0..93348a37964d1 100644 --- a/metadata-models/src/main/resources/entity-registry.yml +++ b/metadata-models/src/main/resources/entity-registry.yml @@ -28,6 +28,7 @@ entities: - testResults - siblings - embed + - incidentsSummary - datasetProperties - editableDatasetProperties - datasetDeprecation @@ -71,6 +72,7 @@ entities: - browsePathsV2 - structuredProperties - subTypes + - incidentsSummary - name: dataFlow category: core keyAspect: dataFlowKey @@ -89,6 +91,7 @@ entities: - dataPlatformInstance - browsePathsV2 - structuredProperties + - incidentsSummary - name: dataProcess keyAspect: dataProcessKey aspects: @@ -127,6 +130,7 @@ entities: - browsePathsV2 - subTypes - structuredProperties + - incidentsSummary - name: dashboard keyAspect: dashboardKey aspects: @@ -148,6 +152,7 @@ entities: - dataPlatformInstance - browsePathsV2 - structuredProperties + - incidentsSummary - name: notebook doc: Notebook represents a combination of query, text, chart and etc. This is in BETA version keyAspect: notebookKey @@ -434,6 +439,12 @@ entities: keyAspect: globalSettingsKey aspects: - globalSettingsInfo + - name: incident + doc: An incident for an asset. + category: core + keyAspect: incidentKey + aspects: + - incidentInfo - name: dataHubRole category: core keyAspect: dataHubRoleKey From 1ed955e7a3c51b0c84f90fc9351d339980dc7b40 Mon Sep 17 00:00:00 2001 From: John Joyce Date: Tue, 23 Jan 2024 13:39:56 -0800 Subject: [PATCH 02/25] Adding Incidents to GraphQL API.. 
raiseIncident, updateIncidentStatus, and incidents fields on entities --- .../linkedin/datahub/graphql/Constants.java | 1 + .../datahub/graphql/GmsGraphQLEngine.java | 43 +- .../incident/EntityIncidentsResolver.java | 124 ++++++ .../incident/RaiseIncidentResolver.java | 129 ++++++ .../UpdateIncidentStatusResolver.java | 105 +++++ .../types/incident/IncidentMapper.java | 74 ++++ .../graphql/types/incident/IncidentType.java | 86 ++++ .../src/main/resources/entity.graphql | 5 + .../src/main/resources/incident.graphql | 368 ++++++++++++++++++ .../incident/EntityIncidentsResolverTest.java | 167 ++++++++ .../types/incident/IncidentMapperTest.java | 96 +++++ .../types/incident/IncidentTypeTest.java | 174 +++++++++ docs-website/sidebars.js | 6 +- docs/authorization/access-policies-guide.md | 2 +- docs/authorization/policies.md | 1 + .../incidents.md} | 51 ++- .../java/com/linkedin/metadata/Constants.java | 9 + .../metadata/service/IncidentServiceTest.java | 328 ++++++++++++++++ .../com/linkedin/incident/IncidentSource.pdl | 10 +- .../incident/IncidentServiceFactory.java | 36 ++ .../metadata/service/IncidentService.java | 221 +++++++++++ .../war/src/main/resources/boot/policies.json | 3 + .../authorization/PoliciesConfig.java | 9 +- 23 files changed, 2013 insertions(+), 35 deletions(-) create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/RaiseIncidentResolver.java create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/UpdateIncidentStatusResolver.java create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentMapper.java create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentType.java create mode 100644 
datahub-graphql-core/src/main/resources/incident.graphql create mode 100644 datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java create mode 100644 datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentMapperTest.java create mode 100644 datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentTypeTest.java rename docs/{managed-datahub/datahub-api/graphql-api/incidents-api-beta.md => incidents/incidents.md} (82%) create mode 100644 metadata-io/src/test/java/com/linkedin/metadata/service/IncidentServiceTest.java create mode 100644 metadata-service/factories/src/main/java/com/linkedin/gms/factory/incident/IncidentServiceFactory.java create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/service/IncidentService.java diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java index 2bde7cb61047b..a647d0ae4e3bb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java @@ -21,6 +21,7 @@ private Constants() {} public static final String LINEAGE_SCHEMA_FILE = "lineage.graphql"; public static final String PROPERTIES_SCHEMA_FILE = "properties.graphql"; public static final String FORMS_SCHEMA_FILE = "forms.graphql"; + public static final String INCIDENTS_SCHEMA_FILE = "incident.graphql"; public static final String BROWSE_PATH_DELIMITER = "/"; public static final String BROWSE_PATH_V2_DELIMITER = "␟"; public static final String VERSION_STAMP_FIELD_NAME = "versionStamp"; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index fb9d837d6640c..3f1caaaf9fdfd 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -63,6 +63,7 @@ import com.linkedin.datahub.graphql.generated.GlossaryNode; import com.linkedin.datahub.graphql.generated.GlossaryTerm; import com.linkedin.datahub.graphql.generated.GlossaryTermAssociation; +import com.linkedin.datahub.graphql.generated.IncidentSource; import com.linkedin.datahub.graphql.generated.IngestionSource; import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadata; import com.linkedin.datahub.graphql.generated.LineageRelationship; @@ -158,6 +159,7 @@ import com.linkedin.datahub.graphql.resolvers.group.ListGroupsResolver; import com.linkedin.datahub.graphql.resolvers.group.RemoveGroupMembersResolver; import com.linkedin.datahub.graphql.resolvers.group.RemoveGroupResolver; +import com.linkedin.datahub.graphql.resolvers.incident.EntityIncidentsResolver; import com.linkedin.datahub.graphql.resolvers.ingest.execution.CancelIngestionExecutionRequestResolver; import com.linkedin.datahub.graphql.resolvers.ingest.execution.CreateIngestionExecutionRequestResolver; import com.linkedin.datahub.graphql.resolvers.ingest.execution.CreateTestConnectionRequestResolver; @@ -305,6 +307,7 @@ import com.linkedin.datahub.graphql.types.form.FormType; import com.linkedin.datahub.graphql.types.glossary.GlossaryNodeType; import com.linkedin.datahub.graphql.types.glossary.GlossaryTermType; +import com.linkedin.datahub.graphql.types.incident.IncidentType; import com.linkedin.datahub.graphql.types.mlmodel.MLFeatureTableType; import com.linkedin.datahub.graphql.types.mlmodel.MLFeatureType; import com.linkedin.datahub.graphql.types.mlmodel.MLModelGroupType; @@ -460,6 +463,7 @@ public class GmsGraphQLEngine { private final DataTypeType dataTypeType; private final EntityTypeType entityTypeType; private final FormType formType; + private final IncidentType incidentType; private 
final int graphQLQueryComplexityLimit; private final int graphQLQueryDepthLimit; @@ -567,6 +571,7 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { this.dataTypeType = new DataTypeType(entityClient); this.entityTypeType = new EntityTypeType(entityClient); this.formType = new FormType(entityClient); + this.incidentType = new IncidentType(entityClient); this.graphQLQueryComplexityLimit = args.graphQLQueryComplexityLimit; this.graphQLQueryDepthLimit = args.graphQLQueryDepthLimit; @@ -609,7 +614,8 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { structuredPropertyType, dataTypeType, entityTypeType, - formType); + formType, + incidentType); this.loadableTypes = new ArrayList<>(entityTypes); // Extend loadable types with types from the plugins // This allows us to offer search and browse capabilities out of the box for those types @@ -698,6 +704,7 @@ public void configureRuntimeWiring(final RuntimeWiring.Builder builder) { configurePluginResolvers(builder); configureStructuredPropertyResolvers(builder); configureFormResolvers(builder); + configureIncidentResolvers(builder); } private void configureOrganisationRoleResolvers(RuntimeWiring.Builder builder) { @@ -747,7 +754,8 @@ public GraphQLEngine.Builder builder() { .addSchema(fileBasedSchema(STEPS_SCHEMA_FILE)) .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE)) .addSchema(fileBasedSchema(PROPERTIES_SCHEMA_FILE)) - .addSchema(fileBasedSchema(FORMS_SCHEMA_FILE)); + .addSchema(fileBasedSchema(FORMS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(INCIDENTS_SCHEMA_FILE)); for (GmsGraphQLPlugin plugin : this.graphQLPlugins) { List pluginSchemaFiles = plugin.getSchemaFiles(); @@ -2660,4 +2668,35 @@ private void configureIngestionSourceResolvers(final RuntimeWiring.Builder build : null; }))); } + + private void configureIncidentResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Incident", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new 
EntityRelationshipsResultResolver(graphClient))); + builder.type( + "IncidentSource", + typeWiring -> + typeWiring.dataFetcher( + "source", + new LoadableTypeResolver<>( + this.assertionType, + (env) -> { + final IncidentSource incidentSource = env.getSource(); + return incidentSource.getSource() != null + ? incidentSource.getSource().getUrn() + : null; + }))); + + // Add incidents attribute to all entities that support it + final List entitiesWithIncidents = + ImmutableList.of("Dataset", "DataJob", "DataFlow", "Dashboard", "Chart"); + for (String entity : entitiesWithIncidents) { + builder.type( + entity, + typeWiring -> + typeWiring.dataFetcher("incidents", new EntityIncidentsResolver(entityClient))); + } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java new file mode 100644 index 0000000000000..c797044d1b224 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java @@ -0,0 +1,124 @@ +package com.linkedin.datahub.graphql.resolvers.incident; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityIncidentsResult; +import com.linkedin.datahub.graphql.generated.Incident; +import com.linkedin.datahub.graphql.types.incident.IncidentMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.query.filter.SortOrder; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchResult; +import 
com.linkedin.metadata.search.utils.QueryUtils; +import com.linkedin.r2.RemoteInvocationException; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; + +/** GraphQL Resolver used for fetching the list of Assertions associated with an Entity. */ +public class EntityIncidentsResolver + implements DataFetcher> { + + static final String INCIDENT_ENTITIES_SEARCH_INDEX_FIELD_NAME = "entities.keyword"; + static final String INCIDENT_STATE_SEARCH_INDEX_FIELD_NAME = "state"; + static final String CREATED_TIME_SEARCH_INDEX_FIELD_NAME = "created"; + + private final EntityClient _entityClient; + + public EntityIncidentsResolver(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + final Integer start = environment.getArgumentOrDefault("start", 0); + final Integer count = environment.getArgumentOrDefault("count", 20); + final Optional maybeState = Optional.ofNullable(environment.getArgument("state")); + + try { + // Step 1: Fetch set of incidents associated with the target entity from the Search + // Index! + // We use the search index so that we can easily sort by the last updated time. 
+ final Filter filter = buildIncidentsEntityFilter(entityUrn, maybeState); + final SortCriterion sortCriterion = buildIncidentsSortCriterion(); + final SearchResult searchResult = + _entityClient.filter( + Constants.INCIDENT_ENTITY_NAME, + filter, + sortCriterion, + start, + count, + context.getAuthentication()); + + final List incidentUrns = + searchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); + + // Step 2: Hydrate the incident entities + final Map entities = + _entityClient.batchGetV2( + Constants.INCIDENT_ENTITY_NAME, + new HashSet<>(incidentUrns), + null, + context.getAuthentication()); + + // Step 3: Map GMS incident model to GraphQL model + final List entityResult = new ArrayList<>(); + for (Urn urn : incidentUrns) { + entityResult.add(entities.getOrDefault(urn, null)); + } + final List incidents = + entityResult.stream() + .filter(Objects::nonNull) + .map(IncidentMapper::map) + .collect(Collectors.toList()); + + // Step 4: Package and return result + final EntityIncidentsResult result = new EntityIncidentsResult(); + result.setCount(searchResult.getPageSize()); + result.setStart(searchResult.getFrom()); + result.setTotal(searchResult.getNumEntities()); + result.setIncidents(incidents); + return result; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve incidents from GMS", e); + } + }); + } + + private Filter buildIncidentsEntityFilter( + final String entityUrn, final Optional maybeState) { + final Map criterionMap = new HashMap<>(); + criterionMap.put(INCIDENT_ENTITIES_SEARCH_INDEX_FIELD_NAME, entityUrn); + maybeState.ifPresent( + incidentState -> criterionMap.put(INCIDENT_STATE_SEARCH_INDEX_FIELD_NAME, incidentState)); + return QueryUtils.newFilter(criterionMap); + } + + private SortCriterion buildIncidentsSortCriterion() { + final SortCriterion sortCriterion = new SortCriterion(); + sortCriterion.setField(CREATED_TIME_SEARCH_INDEX_FIELD_NAME); + 
sortCriterion.setOrder(SortOrder.DESCENDING); + return sortCriterion; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/RaiseIncidentResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/RaiseIncidentResolver.java new file mode 100644 index 0000000000000..2314b3fab5b4a --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/RaiseIncidentResolver.java @@ -0,0 +1,129 @@ +package com.linkedin.datahub.graphql.resolvers.incident; + +import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.RaiseIncidentInput; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.incident.IncidentSource; +import com.linkedin.incident.IncidentSourceType; +import com.linkedin.incident.IncidentState; +import com.linkedin.incident.IncidentStatus; +import com.linkedin.incident.IncidentType; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.key.IncidentKey; +import com.linkedin.mxe.MetadataChangeProposal; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; 
+import java.net.URISyntaxException; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +/** Resolver used for creating (raising) a new asset incident. */ +@Slf4j +@RequiredArgsConstructor +public class RaiseIncidentResolver implements DataFetcher> { + + private final EntityClient _entityClient; + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + + final QueryContext context = environment.getContext(); + final RaiseIncidentInput input = + bindArgument(environment.getArgument("input"), RaiseIncidentInput.class); + final Urn resourceUrn = Urn.createFromString(input.getResourceUrn()); + + return CompletableFuture.supplyAsync( + () -> { + if (!isAuthorizedToCreateIncidentForResource(resourceUrn, context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + try { + // Create the Domain Key + final IncidentKey key = new IncidentKey(); + + // Generate a random UUID for the incident + final String id = UUID.randomUUID().toString(); + key.setId(id); + + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + INCIDENT_ENTITY_NAME, + INCIDENT_INFO_ASPECT_NAME, + mapIncidentInfo(input, context)); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + log.error("Failed to create incident. {}", e.getMessage()); + throw new RuntimeException("Failed to incident", e); + } + }); + } + + private IncidentInfo mapIncidentInfo(final RaiseIncidentInput input, final QueryContext context) + throws URISyntaxException { + final IncidentInfo result = new IncidentInfo(); + result.setType( + IncidentType.valueOf( + input + .getType() + .name())); // Assumption Alert: This assumes that GMS incident type === GraphQL + // incident type. 
+ result.setCustomType(input.getCustomType(), SetMode.IGNORE_NULL); + result.setTitle(input.getTitle(), SetMode.IGNORE_NULL); + result.setDescription(input.getDescription(), SetMode.IGNORE_NULL); + result.setEntities( + new UrnArray(ImmutableList.of(Urn.createFromString(input.getResourceUrn())))); + result.setCreated( + new AuditStamp() + .setActor(Urn.createFromString(context.getActorUrn())) + .setTime(System.currentTimeMillis())); + // Create the incident in the 'active' state by default. + result.setStatus( + new IncidentStatus() + .setState(IncidentState.ACTIVE) + .setLastUpdated( + new AuditStamp() + .setActor(Urn.createFromString(context.getActorUrn())) + .setTime(System.currentTimeMillis()))); + result.setSource(new IncidentSource().setType(IncidentSourceType.MANUAL), SetMode.IGNORE_NULL); + result.setPriority(input.getPriority(), SetMode.IGNORE_NULL); + return result; + } + + private boolean isAuthorizedToCreateIncidentForResource( + final Urn resourceUrn, final QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_INCIDENTS_PRIVILEGE.getType())))); + + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + resourceUrn.getEntityType(), + resourceUrn.toString(), + orPrivilegeGroups); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/UpdateIncidentStatusResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/UpdateIncidentStatusResolver.java new file mode 100644 index 0000000000000..c9d3c23021d38 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/UpdateIncidentStatusResolver.java @@ -0,0 +1,105 @@ +package com.linkedin.datahub.graphql.resolvers.incident; + +import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; +import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; +import com.linkedin.datahub.graphql.generated.UpdateIncidentStatusInput; +import com.linkedin.datahub.graphql.resolvers.AuthUtils; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.incident.IncidentState; +import com.linkedin.incident.IncidentStatus; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.EntityUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; +import lombok.RequiredArgsConstructor; + +/** GraphQL Resolver that updates an incident's status */ +@RequiredArgsConstructor +public class UpdateIncidentStatusResolver implements DataFetcher> { + + private final EntityClient _entityClient; + private final EntityService _entityService; + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + final Urn incidentUrn = 
Urn.createFromString(environment.getArgument("urn")); + final UpdateIncidentStatusInput input = + bindArgument(environment.getArgument("input"), UpdateIncidentStatusInput.class); + return CompletableFuture.supplyAsync( + () -> { + + // Check whether the incident exists. + IncidentInfo info = + (IncidentInfo) + EntityUtils.getAspectFromEntity( + incidentUrn.toString(), INCIDENT_INFO_ASPECT_NAME, _entityService, null); + + if (info != null) { + // Check whether the actor has permission to edit the incident + // Currently only supporting a single entity. TODO: Support multiple incident entities. + final Urn resourceUrn = info.getEntities().get(0); + if (isAuthorizedToUpdateIncident(resourceUrn, context)) { + info.setStatus( + new IncidentStatus() + .setState(IncidentState.valueOf(input.getState().name())) + .setLastUpdated( + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis()))); + if (input.getMessage() != null) { + info.getStatus().setMessage(input.getMessage()); + } + try { + // Finally, create the MetadataChangeProposal. + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + incidentUrn, INCIDENT_INFO_ASPECT_NAME, info); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + throw new RuntimeException("Failed to update incident status!", e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + throw new DataHubGraphQLException( + "Failed to update incident. 
Incident does not exist.", + DataHubGraphQLErrorCode.NOT_FOUND); + }); + } + + private boolean isAuthorizedToUpdateIncident(final Urn resourceUrn, final QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + AuthUtils.ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_INCIDENTS_PRIVILEGE.getType())))); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + resourceUrn.getEntityType(), + resourceUrn.toString(), + orPrivilegeGroups); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentMapper.java new file mode 100644 index 0000000000000..f3824f3237617 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentMapper.java @@ -0,0 +1,74 @@ +package com.linkedin.datahub.graphql.types.incident; + +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.GetMode; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Incident; +import com.linkedin.datahub.graphql.generated.IncidentSource; +import com.linkedin.datahub.graphql.generated.IncidentSourceType; +import com.linkedin.datahub.graphql.generated.IncidentState; +import com.linkedin.datahub.graphql.generated.IncidentStatus; +import com.linkedin.datahub.graphql.generated.IncidentType; +import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; +import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.metadata.Constants; + +/** Maps a GMS {@link 
EntityResponse} to a GraphQL incident. */ +public class IncidentMapper { + + public static Incident map(final EntityResponse entityResponse) { + final Incident result = new Incident(); + final Urn entityUrn = entityResponse.getUrn(); + final EnvelopedAspectMap aspects = entityResponse.getAspects(); + result.setType(EntityType.INCIDENT); + result.setUrn(entityUrn.toString()); + + final EnvelopedAspect envelopedIncidentInfo = aspects.get(Constants.INCIDENT_INFO_ASPECT_NAME); + if (envelopedIncidentInfo != null) { + final IncidentInfo info = new IncidentInfo(envelopedIncidentInfo.getValue().data()); + // Assumption alert! This assumes the incident type in GMS exactly equals that in GraphQL + result.setIncidentType(IncidentType.valueOf(info.getType().name())); + result.setCustomType(info.getCustomType(GetMode.NULL)); + result.setTitle(info.getTitle(GetMode.NULL)); + result.setDescription(info.getDescription(GetMode.NULL)); + result.setPriority(info.getPriority(GetMode.NULL)); + // TODO: Support multiple entities per incident. + result.setEntity(UrnToEntityMapper.map(info.getEntities().get(0))); + if (info.hasSource()) { + result.setSource(mapIncidentSource(info.getSource())); + } + if (info.hasStatus()) { + result.setStatus(mapStatus(info.getStatus())); + } + result.setCreated(AuditStampMapper.map(info.getCreated())); + } else { + throw new RuntimeException(String.format("Incident does not exist!. 
urn: %s", entityUrn)); + } + return result; + } + + private static IncidentStatus mapStatus( + final com.linkedin.incident.IncidentStatus incidentStatus) { + final IncidentStatus result = new IncidentStatus(); + result.setState(IncidentState.valueOf(incidentStatus.getState().name())); + result.setMessage(incidentStatus.getMessage(GetMode.NULL)); + result.setLastUpdated(AuditStampMapper.map(incidentStatus.getLastUpdated())); + return result; + } + + private static IncidentSource mapIncidentSource( + final com.linkedin.incident.IncidentSource incidentSource) { + final IncidentSource result = new IncidentSource(); + result.setType(IncidentSourceType.valueOf(incidentSource.getType().name())); + if (incidentSource.hasSourceUrn()) { + result.setSource(UrnToEntityMapper.map(incidentSource.getSourceUrn())); + } + return result; + } + + private IncidentMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentType.java new file mode 100644 index 0000000000000..2e62bf5a0c345 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentType.java @@ -0,0 +1,86 @@ +package com.linkedin.datahub.graphql.types.incident; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Incident; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import graphql.execution.DataFetcherResult; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import 
java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class IncidentType + implements com.linkedin.datahub.graphql.types.EntityType { + + static final Set ASPECTS_TO_FETCH = ImmutableSet.of(Constants.INCIDENT_INFO_ASPECT_NAME); + private final EntityClient _entityClient; + + public IncidentType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public EntityType type() { + return EntityType.INCIDENT; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return Incident.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List incidentUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + Constants.INCIDENT_ENTITY_NAME, + new HashSet<>(incidentUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : incidentUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(IncidentMapper.map(gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Incidents", e); + } + } + + private Urn getUrn(final String urnStr) { + try { + return Urn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); + } + } +} diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index 8cdce16b47ccc..2413cadfbb210 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -931,6 +931,11 @@ enum EntityType { """ CUSTOM_OWNERSHIP_TYPE + """ + A DataHub incident - SaaS only + """ + INCIDENT + """" A Role from an organisation """ diff --git a/datahub-graphql-core/src/main/resources/incident.graphql b/datahub-graphql-core/src/main/resources/incident.graphql new file mode 100644 index 0000000000000..13d1793739c15 --- /dev/null +++ b/datahub-graphql-core/src/main/resources/incident.graphql @@ -0,0 +1,368 @@ +extend type Mutation { + """ + Create a new incident for a resource (asset) + """ + raiseIncident( + """ + Input required to create a new incident + """ + input: RaiseIncidentInput!): String + + """ + Update an existing incident for a resource (asset) + """ + updateIncidentStatus( + """ + The urn for an existing incident + """ + urn: String! + + """ + Input required to update the state of an existing incident + """ + input: UpdateIncidentStatusInput!): Boolean +} + +""" +A list of Incidents Associated with an Entity +""" +type EntityIncidentsResult { + """ + The starting offset of the result set returned + """ + start: Int! + + """ + The number of assertions in the returned result set + """ + count: Int! + + """ + The total number of assertions in the result set + """ + total: Int! 
+ + """ + The incidents themselves + """ + incidents: [Incident!]! +} + +""" +An incident represents an active issue on a data asset. +""" +type Incident implements Entity { + """ + The primary key of the Incident + """ + urn: String! + + """ + The standard Entity Type + """ + type: EntityType! + + """ + The type of incident + """ + incidentType: IncidentType! + + """ + A custom type of incident. Present only if type is 'CUSTOM' + """ + customType: String + + """ + An optional title associated with the incident + """ + title: String + + """ + An optional description associated with the incident + """ + description: String + + """ + The status of an incident + """ + status: IncidentStatus! + + """ + Optional priority of the incident. Lower value indicates higher priority. + """ + priority: Int + + """ + The entity that the incident is associated with. + """ + entity: Entity! + + """ + The source of the incident, i.e. how it was generated + """ + source: IncidentSource + + """ + The time at which the incident was initially created + """ + created: AuditStamp! + + """ + List of relationships between the source Entity and some destination entities with a given types + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult +} + +""" +The state of an incident. +""" +enum IncidentState { + """ + The incident is ongoing, or active. + """ + ACTIVE + """ + The incident is resolved. + """ + RESOLVED +} + +""" +A specific type of incident +""" +enum IncidentType { + """ + A Freshness Assertion has failed, triggering the incident. + Raised on assets where assertions are configured to generate incidents. + """ + FRESHNESS + + """ + A Volume Assertion has failed, triggering the incident. + Raised on assets where assertions are configured to generate incidents. + """ + VOLUME + + """ + An assertion on a particular column(s) of a Dataset has triggered the incident. + Raised on Datasets where assertions are configured to generate incidents. 
+ """ + DATASET_COLUMN + + """ + An assertion on the row count of a Dataset has triggered the incident. + Raised on datasets where assertions are configured to generate incidents. + """ + DATASET_ROWS + + """ + An operational incident, e.g. failure to materialize a dataset, or failure to execute a task / pipeline. + """ + OPERATIONAL + + """ + A custom type of incident + """ + CUSTOM +} + + +""" +Details about the status of an asset incident +""" +type IncidentStatus { + """ + The state of the incident + """ + state: IncidentState! + """ + An optional message associated with the status + """ + message: String + """ + The time that the status last changed + """ + lastUpdated: AuditStamp! +} + +""" +The source type of an incident, implying how it was created. +""" +enum IncidentSourceType { + """ + The incident was created manually, from either the API or the UI. + """ + MANUAL + """ + An assertion has failed, triggering the incident. + """ + ASSERTION_FAILURE +} + +""" +Details about the source of an incident, e.g. how it was created. +""" +type IncidentSource { + """ + The type of the incident source + """ + type: IncidentSourceType! + + """ + The source of the incident. If the source type is ASSERTION_FAILURE, this will have the assertion that generated the incident. + """ + source: Entity +} + +""" +Input required to create a new incident in the 'Active' state. +""" +input RaiseIncidentInput { + """ + The type of incident + """ + type: IncidentType! + """ + A custom type of incident. Present only if type is 'CUSTOM' + """ + customType: String + """ + An optional title associated with the incident + """ + title: String + """ + An optional description associated with the incident + """ + description: String + """ + The resource (dataset, dashboard, chart, dataFlow, etc) that the incident is associated with. + """ + resourceUrn: String! + """ + The source of the incident, i.e. 
how it was generated + """ + source: IncidentSourceInput + """ + An optional priority for the incident. Lower value indicates a higher priority. + """ + priority: Int +} + +""" +Input required to create an incident source +""" +input IncidentSourceInput { + """ + The type of the incident source + """ + type: IncidentSourceType! +} + +""" +Input required to update status of an existing incident +""" +input UpdateIncidentStatusInput { + """ + The new state of the incident + """ + state: IncidentState! + """ + An optional message associated with the new state + """ + message: String +} + +extend type Dataset { + """ + Incidents associated with the Dataset + """ + incidents( + """ + Optional incident state to filter by, defaults to any state. + """ + state: IncidentState, + """ + Optional start offset, defaults to 0. + """ + start: Int, + """ + Optional start offset, defaults to 20. + """ + count: Int): EntityIncidentsResult +} + +extend type DataJob { + """ + Incidents associated with the DataJob + """ + incidents( + """ + Optional incident state to filter by, defaults to any state. + """ + state: IncidentState, + """ + Optional start offset, defaults to 0. + """ + start: Int, + """ + Optional start offset, defaults to 20. + """ + count: Int): EntityIncidentsResult +} + +extend type DataFlow { + """ + Incidents associated with the DataFlow + """ + incidents( + """ + Optional incident state to filter by, defaults to any state. + """ + state: IncidentState, + """ + Optional start offset, defaults to 0. + """ + start: Int, + """ + Optional start offset, defaults to 20. + """ + count: Int): EntityIncidentsResult +} + +extend type Dashboard { + """ + Incidents associated with the Dashboard + """ + incidents( + """ + Optional incident state to filter by, defaults to any state. + """ + state: IncidentState, + """ + Optional start offset, defaults to 0. + """ + start: Int, + """ + Optional start offset, defaults to 20. 
+ """ + count: Int): EntityIncidentsResult +} + +extend type Chart { + """ + Incidents associated with the Chart + """ + incidents( + """ + Optional incident state to filter by, defaults to any state. + """ + state: IncidentState, + """ + Optional start offset, defaults to 0. + """ + start: Int, + """ + Optional start offset, defaults to 20. + """ + count: Int): EntityIncidentsResult +} \ No newline at end of file diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java new file mode 100644 index 0000000000000..41e9458243a07 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java @@ -0,0 +1,167 @@ +package com.linkedin.datahub.graphql.resolvers.incident; + +import static com.linkedin.datahub.graphql.resolvers.incident.EntityIncidentsResolver.*; +import static org.testng.Assert.*; + +import com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Dataset; +import com.linkedin.datahub.graphql.generated.EntityIncidentsResult; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.incident.IncidentSource; +import com.linkedin.incident.IncidentSourceType; +import com.linkedin.incident.IncidentState; +import 
com.linkedin.incident.IncidentStatus; +import com.linkedin.incident.IncidentType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.key.IncidentKey; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.query.filter.SortOrder; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchEntityArray; +import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.search.utils.QueryUtils; +import graphql.schema.DataFetchingEnvironment; +import java.util.HashMap; +import java.util.Map; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class EntityIncidentsResolverTest { + @Test + public void testGetSuccess() throws Exception { + EntityClient mockClient = Mockito.mock(EntityClient.class); + + Urn assertionUrn = Urn.createFromString("urn:li:assertion:test"); + Urn userUrn = Urn.createFromString("urn:li:corpuser:test"); + Urn datasetUrn = Urn.createFromString("urn:li:dataset:(test,test,test)"); + Urn incidentUrn = Urn.createFromString("urn:li:incident:test-guid"); + + Map incidentAspects = new HashMap<>(); + incidentAspects.put( + Constants.INCIDENT_KEY_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect() + .setValue(new Aspect(new IncidentKey().setId("test-guid").data()))); + + IncidentInfo expectedInfo = + new IncidentInfo() + .setType(IncidentType.DATASET_COLUMN) + .setCustomType("Custom Type") + .setDescription("Description") + .setPriority(5) + .setTitle("Title") + .setEntities(new UrnArray(ImmutableList.of(datasetUrn))) + .setSource( + new IncidentSource() + .setType(IncidentSourceType.ASSERTION_FAILURE) + .setSourceUrn(assertionUrn)) + .setStatus( + new IncidentStatus() + .setState(IncidentState.ACTIVE) + .setMessage("Message") + .setLastUpdated(new AuditStamp().setTime(1L).setActor(userUrn))) + .setCreated(new AuditStamp().setTime(0L).setActor(userUrn)); + + incidentAspects.put( + 
Constants.INCIDENT_INFO_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(expectedInfo.data()))); + + final Map criterionMap = new HashMap<>(); + criterionMap.put(INCIDENT_ENTITIES_SEARCH_INDEX_FIELD_NAME, datasetUrn.toString()); + Filter expectedFilter = QueryUtils.newFilter(criterionMap); + + SortCriterion expectedSort = new SortCriterion(); + expectedSort.setField(CREATED_TIME_SEARCH_INDEX_FIELD_NAME); + expectedSort.setOrder(SortOrder.DESCENDING); + + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.INCIDENT_ENTITY_NAME), + Mockito.eq(expectedFilter), + Mockito.eq(expectedSort), + Mockito.eq(0), + Mockito.eq(10), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(incidentUrn))))); + + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INCIDENT_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(incidentUrn)), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + incidentUrn, + new EntityResponse() + .setEntityName(Constants.INCIDENT_ENTITY_NAME) + .setUrn(incidentUrn) + .setAspects(new EnvelopedAspectMap(incidentAspects)))); + + EntityIncidentsResolver resolver = new EntityIncidentsResolver(mockClient); + + // Execute resolver + QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + + Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("start"), Mockito.eq(0))).thenReturn(0); + Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("count"), Mockito.eq(20))).thenReturn(10); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Dataset parentEntity = new Dataset(); + parentEntity.setUrn(datasetUrn.toString()); + 
Mockito.when(mockEnv.getSource()).thenReturn(parentEntity); + + EntityIncidentsResult result = resolver.get(mockEnv).get(); + + // Assert that GraphQL Incident run event matches expectations + assertEquals(result.getStart(), 0); + assertEquals(result.getCount(), 1); + assertEquals(result.getTotal(), 1); + + com.linkedin.datahub.graphql.generated.Incident incident = + resolver.get(mockEnv).get().getIncidents().get(0); + assertEquals(incident.getUrn(), incidentUrn.toString()); + assertEquals(incident.getType(), EntityType.INCIDENT); + assertEquals(incident.getIncidentType().toString(), expectedInfo.getType().toString()); + assertEquals(incident.getTitle(), expectedInfo.getTitle()); + assertEquals(incident.getDescription(), expectedInfo.getDescription()); + assertEquals(incident.getCustomType(), expectedInfo.getCustomType()); + assertEquals( + incident.getStatus().getState().toString(), expectedInfo.getStatus().getState().toString()); + assertEquals(incident.getStatus().getMessage(), expectedInfo.getStatus().getMessage()); + assertEquals( + incident.getStatus().getLastUpdated().getTime(), + expectedInfo.getStatus().getLastUpdated().getTime()); + assertEquals( + incident.getStatus().getLastUpdated().getActor(), + expectedInfo.getStatus().getLastUpdated().getActor().toString()); + assertEquals( + incident.getSource().getType().toString(), expectedInfo.getSource().getType().toString()); + assertEquals( + incident.getSource().getSource().getUrn(), + expectedInfo.getSource().getSourceUrn().toString()); + assertEquals(incident.getCreated().getActor(), expectedInfo.getCreated().getActor().toString()); + assertEquals(incident.getCreated().getTime(), expectedInfo.getCreated().getTime()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentMapperTest.java new file mode 100644 index 0000000000000..f0586929a39f1 --- 
/dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentMapperTest.java @@ -0,0 +1,96 @@ +package com.linkedin.datahub.graphql.types.incident; + +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + +import com.linkedin.common.AuditStamp; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Incident; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.incident.IncidentSource; +import com.linkedin.incident.IncidentSourceType; +import com.linkedin.incident.IncidentState; +import com.linkedin.incident.IncidentStatus; +import com.linkedin.incident.IncidentType; +import com.linkedin.metadata.Constants; +import java.util.Collections; +import org.testng.annotations.Test; + +public class IncidentMapperTest { + + @Test + public void testMap() throws Exception { + EntityResponse entityResponse = new EntityResponse(); + Urn urn = Urn.createFromString("urn:li:incident:1"); + Urn userUrn = Urn.createFromString("urn:li:corpuser:test"); + Urn assertionUrn = Urn.createFromString("urn:li:assertion:test"); + entityResponse.setUrn(urn); + + EnvelopedAspect envelopedIncidentInfo = new EnvelopedAspect(); + IncidentInfo incidentInfo = new IncidentInfo(); + incidentInfo.setType(IncidentType.DATASET_COLUMN); + incidentInfo.setCustomType("Custom Type"); + incidentInfo.setTitle("Test Incident", SetMode.IGNORE_NULL); + incidentInfo.setDescription("This is a test incident", SetMode.IGNORE_NULL); + incidentInfo.setPriority(1, SetMode.IGNORE_NULL); + incidentInfo.setEntities(new UrnArray(Collections.singletonList(urn))); + + IncidentSource source = new 
IncidentSource(); + source.setType(IncidentSourceType.ASSERTION_FAILURE); + source.setSourceUrn(assertionUrn); + incidentInfo.setSource(source); + + AuditStamp lastStatus = new AuditStamp(); + lastStatus.setTime(1000L); + lastStatus.setActor(userUrn); + incidentInfo.setCreated(lastStatus); + + IncidentStatus status = new IncidentStatus(); + status.setState(IncidentState.ACTIVE); + status.setLastUpdated(lastStatus); + status.setMessage("This incident is open.", SetMode.IGNORE_NULL); + incidentInfo.setStatus(status); + + AuditStamp created = new AuditStamp(); + created.setTime(1000L); + created.setActor(userUrn); + incidentInfo.setCreated(created); + + envelopedIncidentInfo.setValue(new Aspect(incidentInfo.data())); + entityResponse.setAspects( + new EnvelopedAspectMap( + Collections.singletonMap(Constants.INCIDENT_INFO_ASPECT_NAME, envelopedIncidentInfo))); + + Incident incident = IncidentMapper.map(entityResponse); + + assertNotNull(incident); + assertEquals(incident.getUrn(), "urn:li:incident:1"); + assertEquals(incident.getType(), EntityType.INCIDENT); + assertEquals(incident.getCustomType(), "Custom Type"); + assertEquals( + incident.getIncidentType().toString(), + com.linkedin.datahub.graphql.generated.IncidentType.DATASET_COLUMN.toString()); + assertEquals(incident.getTitle(), "Test Incident"); + assertEquals(incident.getDescription(), "This is a test incident"); + assertEquals(incident.getPriority().intValue(), 1); + assertEquals( + incident.getSource().getType().toString(), + com.linkedin.datahub.graphql.generated.IncidentSourceType.ASSERTION_FAILURE.toString()); + assertEquals(incident.getSource().getSource().getUrn(), assertionUrn.toString()); + assertEquals( + incident.getStatus().getState().toString(), + com.linkedin.datahub.graphql.generated.IncidentState.ACTIVE.toString()); + assertEquals(incident.getStatus().getMessage(), "This incident is open."); + assertEquals(incident.getStatus().getLastUpdated().getTime().longValue(), 1000L); + 
assertEquals(incident.getStatus().getLastUpdated().getActor(), userUrn.toString()); + assertEquals(incident.getCreated().getTime().longValue(), 1000L); + assertEquals(incident.getCreated().getActor(), userUrn.toString()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentTypeTest.java new file mode 100644 index 0000000000000..219339e1d65b8 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentTypeTest.java @@ -0,0 +1,174 @@ +package com.linkedin.datahub.graphql.types.incident; + +import static org.testng.Assert.*; + +import com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Incident; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.incident.IncidentSource; +import com.linkedin.incident.IncidentSourceType; +import com.linkedin.incident.IncidentState; +import com.linkedin.incident.IncidentStatus; +import com.linkedin.incident.IncidentType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.key.IncidentKey; +import com.linkedin.r2.RemoteInvocationException; +import graphql.execution.DataFetcherResult; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; 
+import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class IncidentTypeTest { + + private static final String TEST_INCIDENT_URN = "urn:li:incident:guid-1"; + private static Urn testAssertionUrn; + private static Urn testUserUrn; + private static Urn testDatasetUrn; + + static { + try { + testAssertionUrn = Urn.createFromString("urn:li:assertion:test"); + testUserUrn = Urn.createFromString("urn:li:corpuser:test"); + testDatasetUrn = Urn.createFromString("urn:li:dataset:(test,test,test)"); + } catch (Exception ignored) { + // ignored + } + } + + private static final IncidentKey TEST_INCIDENT_KEY = new IncidentKey().setId("guid-1"); + private static final IncidentInfo TEST_INCIDENT_INFO = + new IncidentInfo() + .setType(IncidentType.DATASET_COLUMN) + .setCustomType("Custom Type") + .setDescription("Description") + .setPriority(5) + .setTitle("Title") + .setEntities(new UrnArray(ImmutableList.of(testDatasetUrn))) + .setSource( + new IncidentSource() + .setType(IncidentSourceType.ASSERTION_FAILURE) + .setSourceUrn(testAssertionUrn)) + .setStatus( + new IncidentStatus() + .setState(IncidentState.ACTIVE) + .setMessage("Message") + .setLastUpdated(new AuditStamp().setTime(1L).setActor(testUserUrn))) + .setCreated(new AuditStamp().setTime(0L).setActor(testUserUrn)); + private static final String TEST_INCIDENT_URN_2 = "urn:li:incident:guid-2"; + + @Test + public void testBatchLoad() throws Exception { + + EntityClient client = Mockito.mock(EntityClient.class); + + Urn incidentUrn1 = Urn.createFromString(TEST_INCIDENT_URN); + Urn incidentUrn2 = Urn.createFromString(TEST_INCIDENT_URN_2); + + Map incident1Aspects = new HashMap<>(); + incident1Aspects.put( + Constants.INCIDENT_KEY_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_INCIDENT_KEY.data()))); + incident1Aspects.put( + Constants.INCIDENT_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_INCIDENT_INFO.data()))); + Mockito.when( + client.batchGetV2( + 
Mockito.eq(Constants.INCIDENT_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(incidentUrn1, incidentUrn2))), + Mockito.eq( + com.linkedin.datahub.graphql.types.incident.IncidentType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + incidentUrn1, + new EntityResponse() + .setEntityName(Constants.INCIDENT_ENTITY_NAME) + .setUrn(incidentUrn1) + .setAspects(new EnvelopedAspectMap(incident1Aspects)))); + + com.linkedin.datahub.graphql.types.incident.IncidentType type = + new com.linkedin.datahub.graphql.types.incident.IncidentType(client); + + QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + List> result = + type.batchLoad(ImmutableList.of(TEST_INCIDENT_URN, TEST_INCIDENT_URN_2), mockContext); + + // Verify response + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.INCIDENT_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(incidentUrn1, incidentUrn2)), + Mockito.eq(com.linkedin.datahub.graphql.types.incident.IncidentType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); + + assertEquals(result.size(), 2); + + Incident incident = result.get(0).getData(); + assertEquals(incident.getUrn(), TEST_INCIDENT_URN.toString()); + assertEquals(incident.getType(), EntityType.INCIDENT); + assertEquals(incident.getIncidentType().toString(), TEST_INCIDENT_INFO.getType().toString()); + assertEquals(incident.getTitle(), TEST_INCIDENT_INFO.getTitle()); + assertEquals(incident.getDescription(), TEST_INCIDENT_INFO.getDescription()); + assertEquals(incident.getCustomType(), TEST_INCIDENT_INFO.getCustomType()); + assertEquals( + incident.getStatus().getState().toString(), + TEST_INCIDENT_INFO.getStatus().getState().toString()); + assertEquals(incident.getStatus().getMessage(), TEST_INCIDENT_INFO.getStatus().getMessage()); + assertEquals( + incident.getStatus().getLastUpdated().getTime(), + 
TEST_INCIDENT_INFO.getStatus().getLastUpdated().getTime()); + assertEquals( + incident.getStatus().getLastUpdated().getActor(), + TEST_INCIDENT_INFO.getStatus().getLastUpdated().getActor().toString()); + assertEquals( + incident.getSource().getType().toString(), + TEST_INCIDENT_INFO.getSource().getType().toString()); + assertEquals( + incident.getSource().getSource().getUrn(), + TEST_INCIDENT_INFO.getSource().getSourceUrn().toString()); + assertEquals( + incident.getCreated().getActor(), TEST_INCIDENT_INFO.getCreated().getActor().toString()); + assertEquals(incident.getCreated().getTime(), TEST_INCIDENT_INFO.getCreated().getTime()); + + // Assert second element is null. + assertNull(result.get(1)); + } + + @Test + public void testBatchLoadClientException() throws Exception { + EntityClient mockClient = Mockito.mock(EntityClient.class); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); + com.linkedin.datahub.graphql.types.incident.IncidentType type = + new com.linkedin.datahub.graphql.types.incident.IncidentType(mockClient); + + // Execute Batch load + QueryContext context = Mockito.mock(QueryContext.class); + Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + assertThrows( + RuntimeException.class, + () -> type.batchLoad(ImmutableList.of(TEST_INCIDENT_URN, TEST_INCIDENT_URN_2), context)); + } +} diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index 1e6d8bec01813..094ab88588706 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -56,6 +56,7 @@ module.exports = { "docs/features/dataset-usage-and-query-history", "docs/posts", "docs/sync-status", + "docs/incidents/incidents", "docs/generated/lineage/lineage-feature-guide", { type: "doc", @@ -129,11 +130,6 @@ module.exports = { { "GraphQL API": [ "docs/managed-datahub/datahub-api/graphql-api/getting-started", 
- { - type: "doc", - id: "docs/managed-datahub/datahub-api/graphql-api/incidents-api-beta", - className: "saasOnly", - }, ], }, ], diff --git a/docs/authorization/access-policies-guide.md b/docs/authorization/access-policies-guide.md index 1eabb64d2878f..436f70b6d8c85 100644 --- a/docs/authorization/access-policies-guide.md +++ b/docs/authorization/access-policies-guide.md @@ -209,6 +209,7 @@ The common Metadata Privileges, which span across entity types, include: | Edit Deprecation | Allow actor to edit the Deprecation status of an entity. | | Edit Lineage | Allow actor to edit custom lineage edges for the entity. | | Edit Data Product | Allow actor to edit the data product that an entity is part of | +| Edit Incidents | Allow actor to raise and resolve incidents associated with an entity. | | Propose Tags | (Acryl DataHub only) Allow actor to propose new Tags for the entity. | | Propose Glossary Terms | (Acryl DataHub only) Allow actor to propose new Glossary Terms for the entity. | | Propose Documentation | (Acryl DataHub only) Allow actor to propose new Documentation for the entity. | @@ -236,7 +237,6 @@ The common Metadata Privileges, which span across entity types, include: | Dataset | View Dataset Usage | Allow actor to access usage metadata about a dataset both in the UI and in the GraphQL API. This includes example queries, number of queries, etc. Also applies to REST APIs when REST API Authorization is enabled. | | Dataset | View Dataset Profile | Allow actor to access a dataset's profile both in the UI and in the GraphQL API. This includes snapshot statistics like #rows, #columns, null percentage per field, etc. | | Dataset | Edit Assertions | Allow actor to change the assertions associated with a dataset. | -| Dataset | Edit Incidents | (Acryl DataHub only) Allow actor to change the incidents associated with a dataset. | | Dataset | Edit Monitors | (Acryl DataHub only) Allow actor to change the assertion monitors associated with a dataset. 
| | Tag | Edit Tag Color | Allow actor to change the color of a Tag. | | Group | Edit Group Members | Allow actor to add and remove members to a group. | diff --git a/docs/authorization/policies.md b/docs/authorization/policies.md index 63aa6688d3eec..a50764b04d566 100644 --- a/docs/authorization/policies.md +++ b/docs/authorization/policies.md @@ -106,6 +106,7 @@ We currently support the following: | Edit Domain | Allow actor to edit the Domain of an entity. | | Edit Deprecation | Allow actor to edit the Deprecation status of an entity. | | Edit Assertions | Allow actor to add and remove assertions from an entity. | +| Edit Incidents | Allow actor to raise and resolve incidents for an entity. | | Edit All | Allow actor to edit any information about an entity. Super user privileges. Controls the ability to ingest using API when REST API Authorization is enabled. | | | Get Timeline API[^1] | Allow actor to get the timeline of an entity via API. | | Get Entity API[^1] | Allow actor to get an entity via API. | diff --git a/docs/managed-datahub/datahub-api/graphql-api/incidents-api-beta.md b/docs/incidents/incidents.md similarity index 82% rename from docs/managed-datahub/datahub-api/graphql-api/incidents-api-beta.md rename to docs/incidents/incidents.md index 16d83d2f57575..5f51e421aad3b 100644 --- a/docs/managed-datahub/datahub-api/graphql-api/incidents-api-beta.md +++ b/docs/incidents/incidents.md @@ -4,8 +4,9 @@ description: This page provides an overview of working with the DataHub Incident import FeatureAvailability from '@site/src/components/FeatureAvailability'; -# Incidents API (Beta) - +# About Incidents + + ## Introduction @@ -13,24 +14,44 @@ import FeatureAvailability from '@site/src/components/FeatureAvailability'; A couple scenarios in which incidents can be useful are -1. **Pipeline Circuit Breaking:** You can use Incidents as the basis for intelligent data pipelines that verify upstream inputs (e.g. 
datasets) are free of any active incidents before executing. -2. \[Coming Soon] **Announcing Known-Bad Assets**: You can mark a known-bad data asset as under an ongoing incident so consumers and stakeholders can be informed about the health status of a data asset via the DataHub UI. Moreover, they can follow the incident as it progresses toward resolution. +1. **Communicating Assets with Ongoing Issues**: You can mark a known-bad data asset as under an ongoing incident so consumers and stakeholders can be informed about the health status of a data asset via the DataHub UI. Moreover, they can follow the incident as it progresses toward resolution. +2. **Pipeline Circuit Breaking (advanced):** You can use Incidents as a basis for orchestrating and blocking data pipelines that have inputs with active issues to avoid propagating bad data downstream. -In the next section, we'll show you how to +In the next section, we'll walk through how to 1. Create a new incident 2. Fetch all incidents for a data asset 3. Resolve an incident -for **Datasets** using the Acryl [GraphQL API](docs/api/graphql/overview.md). +for **Datasets**, **Dashboards**, **Charts**, **Data Pipelines** (Data Flows), and **Data Tasks** (Data Jobs) using the DataHub UI or [GraphQL API](docs/api/graphql/overview.md). Let's get started! ## Creating an Incident -:::info -Creating incidents is currently only supported against **Dataset** assets. -::: +To create an incident, simply navigate to the profile page for the asset of interest, click +the 3-dot menu icon on the right side of the header, and click **Raise Incident**. + +Choose an existing type, or define your own, and then author a title and description of the issue. Finally, +click `Add` to create the new issue. This will mark the asset with a health status badge indicating that it +is possibly unfit for use due to an ongoing issue. 
+ +## Resolving an Incident + +To resolve an incident, simply navigate to the profile page for the asset of interest, click +the **Incidents** tab, and then click the **Resolve** button for the incident of interest. +This will resolve the incident from the list of active incidents for the asset, removing it from the +asset's health status. + +## Finding Assets with Active Incidents + +To view all assets with active incidents, simply apply the `Has Active Incidents` filter on the search results page of DataHub. +To view all assets first, click **Explore all** on the DataHub homepage. + +## Creating an Incident via API + +Oftentimes it is desirable to raise and resolve incidents for particular data assets in an automated fashion using the DataHub API, e.g. as part of an +orchestration pipeline. To create (i.e. raise) a new incident for a data asset, simply create a GraphQL request using the `raiseIncident` mutation. @@ -151,8 +172,6 @@ res_data = response.json() # Get result as JSON To fetch the the ongoing incidents for a data asset, we can use the `incidents` GraphQL field on the entity of interest. -### Datasets - To retrieve all incidents for a Dataset with a particular [URN](docs/what/urn.md), you can reference the 'incidents' field of the Dataset type: ``` @@ -276,7 +295,7 @@ response.raise_for_status() res_data = response.json() # Get result as JSON ``` -## Resolving an Incident +## Resolving an Incident via API To resolve an incident for a data asset, simply create a GraphQL request using the `updateIncidentStatus` mutation. To mark an incident as resolved, simply update its state to `RESOLVED`. 
@@ -395,20 +414,18 @@ Authorization: Bearer **Exploring GraphQL API** -Also, remember that you can play with an interactive version of the Acryl GraphQL API at `https://your-account-id.acryl.io/api/graphiql` +Also, remember that you can play with an interactive version of the GraphQL API at `https://your-account-id.acryl.io/api/graphiql` ::: -## Enabling Slack Notifications +## Enabling Slack Notifications (Acryl Cloud Only) -You can configure Acryl to send slack notifications to a specific channel when incidents are raised or their status is changed. +In Acryl Cloud, you can configure your workspace to send Slack notifications to a specific channel when incidents are raised or their status is changed. These notifications are also able to tag the immediate asset's owners, along with the owners of downstream assets consuming it. -

- To do so, simply follow the [Slack Integration Guide](docs/managed-datahub/saas-slack-setup.md) and contact your Acryl customer success team to enable the feature! diff --git a/li-utils/src/main/java/com/linkedin/metadata/Constants.java b/li-utils/src/main/java/com/linkedin/metadata/Constants.java index 39a17612aa4b3..47db240d51a5b 100644 --- a/li-utils/src/main/java/com/linkedin/metadata/Constants.java +++ b/li-utils/src/main/java/com/linkedin/metadata/Constants.java @@ -60,6 +60,7 @@ public class Constants { public static final String CONTAINER_ENTITY_NAME = "container"; public static final String DOMAIN_ENTITY_NAME = "domain"; public static final String ASSERTION_ENTITY_NAME = "assertion"; + public static final String INCIDENT_ENTITY_NAME = "incident"; public static final String INGESTION_SOURCE_ENTITY_NAME = "dataHubIngestionSource"; public static final String SECRETS_ENTITY_NAME = "dataHubSecret"; public static final String EXECUTION_REQUEST_ENTITY_NAME = "dataHubExecutionRequest"; @@ -100,6 +101,7 @@ public class Constants { public static final String ORIGIN_ASPECT_NAME = "origin"; public static final String INPUT_FIELDS_ASPECT_NAME = "inputFields"; public static final String EMBED_ASPECT_NAME = "embed"; + public static final String INCIDENTS_SUMMARY_ASPECT_NAME = "incidentsSummary"; // User public static final String CORP_USER_KEY_ASPECT_NAME = "corpUserKey"; @@ -272,6 +274,10 @@ public class Constants { public static final String TEST_INFO_ASPECT_NAME = "testInfo"; public static final String TEST_RESULTS_ASPECT_NAME = "testResults"; + // Incident + public static final String INCIDENT_KEY_ASPECT_NAME = "incidentKey"; + public static final String INCIDENT_INFO_ASPECT_NAME = "incidentInfo"; + // DataHub Ingestion Source public static final String INGESTION_SOURCE_KEY_ASPECT_NAME = "dataHubIngestionSourceKey"; public static final String INGESTION_INFO_ASPECT_NAME = "dataHubIngestionSourceInfo"; @@ -386,6 +392,9 @@ public class Constants { public static final 
String DATA_FLOW_URN_KEY = "dataFlowUrn"; public static final String DATA_JOB_URN_KEY = "dataJobUrn"; + // Incidents + public static final String ENTITY_REF = "entities"; + // Config public static final String ELASTICSEARCH_IMPLEMENTATION_OPENSEARCH = "opensearch"; public static final String ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH = "elasticsearch"; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/service/IncidentServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/service/IncidentServiceTest.java new file mode 100644 index 0000000000000..2f3abbeb8435e --- /dev/null +++ b/metadata-io/src/test/java/com/linkedin/metadata/service/IncidentServiceTest.java @@ -0,0 +1,328 @@ +package com.linkedin.metadata.service; + +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.IncidentsSummary; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.incident.IncidentSource; +import com.linkedin.incident.IncidentSourceType; +import com.linkedin.incident.IncidentState; +import com.linkedin.incident.IncidentStatus; +import com.linkedin.incident.IncidentType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.entity.AspectUtils; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import java.util.Collections; +import org.mockito.Mockito; +import 
org.testcontainers.shaded.com.google.common.collect.ImmutableMap; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class IncidentServiceTest { + + private static final Urn TEST_INCIDENT_URN = UrnUtils.getUrn("urn:li:incident:test"); + private static final Urn TEST_NON_EXISTENT_INCIDENT_URN = + UrnUtils.getUrn("urn:li:incident:test-non-existant"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,name,PROD)"); + private static final Urn TEST_NON_EXISTENT_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,non-existant,PROD)"); + private static final Urn TEST_USER_URN = UrnUtils.getUrn(SYSTEM_ACTOR); + + @Test + private void testGetIncidentInfo() throws Exception { + final EntityClient mockClient = createMockEntityClient(); + final IncidentService service = + new IncidentService(mockClient, Mockito.mock(Authentication.class)); + + // Case 1: Info exists + IncidentInfo info = service.getIncidentInfo(TEST_INCIDENT_URN); + Assert.assertEquals(info, mockIncidentInfo()); + Mockito.verify(mockClient, Mockito.times(1)) + .getV2( + Mockito.eq(Constants.INCIDENT_ENTITY_NAME), + Mockito.eq(TEST_INCIDENT_URN), + Mockito.eq(ImmutableSet.of(Constants.INCIDENT_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class)); + + // Case 2: Info does not exist + info = service.getIncidentInfo(TEST_NON_EXISTENT_INCIDENT_URN); + Assert.assertNull(info); + Mockito.verify(mockClient, Mockito.times(1)) + .getV2( + Mockito.eq(Constants.INCIDENT_ENTITY_NAME), + Mockito.eq(TEST_NON_EXISTENT_INCIDENT_URN), + Mockito.eq(ImmutableSet.of(Constants.INCIDENT_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class)); + } + + @Test + private void testGetIncidentsSummary() throws Exception { + final EntityClient mockClient = createMockEntityClient(); + final IncidentService service = + new IncidentService(mockClient, Mockito.mock(Authentication.class)); + + // Case 1: Summary exists + IncidentsSummary 
summary = service.getIncidentsSummary(TEST_DATASET_URN); + Assert.assertEquals(summary, mockIncidentSummary()); + Mockito.verify(mockClient, Mockito.times(1)) + .getV2( + Mockito.eq(DATASET_ENTITY_NAME), + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(ImmutableSet.of(INCIDENTS_SUMMARY_ASPECT_NAME)), + Mockito.any(Authentication.class)); + + // Case 2: Summary does not exist + summary = service.getIncidentsSummary(TEST_NON_EXISTENT_DATASET_URN); + Assert.assertNull(summary); + Mockito.verify(mockClient, Mockito.times(1)) + .getV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(ImmutableSet.of(Constants.INCIDENTS_SUMMARY_ASPECT_NAME)), + Mockito.any(Authentication.class)); + } + + @Test + private void testUpdateIncidentsSummary() throws Exception { + final EntityClient mockClient = createMockEntityClient(); + final IncidentService service = + new IncidentService(mockClient, Mockito.mock(Authentication.class)); + service.updateIncidentsSummary(TEST_DATASET_URN, mockIncidentSummary()); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq(mockIncidentSummaryMcp()), + Mockito.any(Authentication.class), + Mockito.eq(false)); + } + + @Test + private void testRaiseIncidentRequiredFields() throws Exception { + final EntityClient mockClient = Mockito.mock(EntityClient.class); + final IncidentService service = + new IncidentService(mockClient, Mockito.mock(Authentication.class)); + service.raiseIncident( + IncidentType.FRESHNESS, + null, + null, + null, + null, + ImmutableList.of(TEST_DATASET_URN), + null, + UrnUtils.getUrn(SYSTEM_ACTOR), + null); + + final IncidentInfo expectedInfo = + new IncidentInfo() + .setType(IncidentType.FRESHNESS) + .setEntities(new UrnArray(ImmutableList.of(TEST_DATASET_URN))) + .setStatus( + new IncidentStatus() + .setState(IncidentState.ACTIVE) + .setLastUpdated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN))) + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); 
+ + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.argThat( + new IncidentInfoArgumentMatcher( + AspectUtils.buildMetadataChangeProposal( + TEST_INCIDENT_URN, INCIDENT_INFO_ASPECT_NAME, expectedInfo))), + Mockito.any(Authentication.class), + Mockito.eq(false)); + } + + @Test + private void testRaiseIncidentAllFields() throws Exception { + final EntityClient mockClient = Mockito.mock(EntityClient.class); + final IncidentService service = + new IncidentService(mockClient, Mockito.mock(Authentication.class)); + service.raiseIncident( + IncidentType.FRESHNESS, + "custom type", + 2, + "title", + "description", + ImmutableList.of(TEST_DATASET_URN), + new IncidentSource().setType(IncidentSourceType.ASSERTION_FAILURE), + UrnUtils.getUrn(SYSTEM_ACTOR), + "message"); + + final IncidentInfo expectedInfo = + new IncidentInfo() + .setType(IncidentType.FRESHNESS) + .setCustomType("custom type") + .setPriority(2) + .setTitle("title") + .setDescription("description") + .setEntities(new UrnArray(ImmutableList.of(TEST_DATASET_URN))) + .setStatus( + new IncidentStatus() + .setState(IncidentState.ACTIVE) + .setLastUpdated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) + .setMessage("message")) + .setSource(new IncidentSource().setType(IncidentSourceType.ASSERTION_FAILURE)) + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.argThat( + new IncidentInfoArgumentMatcher( + AspectUtils.buildMetadataChangeProposal( + TEST_INCIDENT_URN, INCIDENT_INFO_ASPECT_NAME, expectedInfo))), + Mockito.any(Authentication.class), + Mockito.eq(false)); + } + + @Test + private void testUpdateIncidentStatus() throws Exception { + final EntityClient mockClient = createMockEntityClient(); + final IncidentService service = + new IncidentService(mockClient, Mockito.mock(Authentication.class)); + service.updateIncidentStatus( + TEST_INCIDENT_URN, IncidentState.RESOLVED, TEST_USER_URN, 
"message"); + + IncidentInfo expectedInfo = new IncidentInfo(mockIncidentInfo().data()); + expectedInfo.setStatus( + new IncidentStatus() + .setState(IncidentState.RESOLVED) + .setMessage("message") + .setLastUpdated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L))); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.argThat( + new IncidentInfoArgumentMatcher( + AspectUtils.buildMetadataChangeProposal( + TEST_INCIDENT_URN, INCIDENT_INFO_ASPECT_NAME, expectedInfo))), + Mockito.any(Authentication.class), + Mockito.eq(false)); + } + + @Test + private void testDeleteIncident() throws Exception { + final EntityClient mockClient = Mockito.mock(EntityClient.class); + final IncidentService service = + new IncidentService(mockClient, Mockito.mock(Authentication.class)); + service.deleteIncident(TEST_INCIDENT_URN); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(Mockito.eq(TEST_INCIDENT_URN), Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntityReferences(Mockito.eq(TEST_INCIDENT_URN), Mockito.any(Authentication.class)); + } + + private static EntityClient createMockEntityClient() throws Exception { + EntityClient mockClient = Mockito.mock(EntityClient.class); + + // Init for incident info + Mockito.when( + mockClient.getV2( + Mockito.eq(Constants.INCIDENT_ENTITY_NAME), + Mockito.eq(TEST_INCIDENT_URN), + Mockito.eq(ImmutableSet.of(Constants.INCIDENT_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setUrn(TEST_INCIDENT_URN) + .setEntityName(INCIDENT_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + INCIDENT_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(mockIncidentInfo().data())))))); + Mockito.when( + mockClient.getV2( + Mockito.eq(Constants.INCIDENT_ENTITY_NAME), + Mockito.eq(TEST_NON_EXISTENT_INCIDENT_URN), + Mockito.eq(ImmutableSet.of(Constants.INCIDENT_INFO_ASPECT_NAME)), + 
Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setUrn(TEST_NON_EXISTENT_INCIDENT_URN) + .setEntityName(INCIDENT_ENTITY_NAME) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))); + + // Init for incidents summary + Mockito.when( + mockClient.getV2( + Mockito.eq(DATASET_ENTITY_NAME), + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(ImmutableSet.of(INCIDENTS_SUMMARY_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setUrn(TEST_DATASET_URN) + .setEntityName(DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + INCIDENTS_SUMMARY_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(mockIncidentSummary().data())))))); + Mockito.when( + mockClient.getV2( + Mockito.eq(DATASET_ENTITY_NAME), + Mockito.eq(TEST_NON_EXISTENT_DATASET_URN), + Mockito.eq(ImmutableSet.of(INCIDENTS_SUMMARY_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setUrn(TEST_NON_EXISTENT_DATASET_URN) + .setEntityName(DATASET_ENTITY_NAME) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))); + + // Init for update summary + Mockito.when( + mockClient.ingestProposal( + Mockito.eq(mockIncidentSummaryMcp()), + Mockito.any(Authentication.class), + Mockito.eq(false))) + .thenReturn(TEST_DATASET_URN.toString()); + + return mockClient; + } + + private static IncidentInfo mockIncidentInfo() throws Exception { + return new IncidentInfo() + .setType(IncidentType.FRESHNESS) + .setEntities(new UrnArray(ImmutableList.of(TEST_DATASET_URN))) + .setStatus( + new IncidentStatus() + .setState(IncidentState.ACTIVE) + .setLastUpdated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN))) + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); + } + + private static IncidentsSummary mockIncidentSummary() throws Exception { + final IncidentsSummary summary = new IncidentsSummary(); + summary.setResolvedIncidents(new 
UrnArray(ImmutableList.of(TEST_INCIDENT_URN))); + return summary; + } + + private static MetadataChangeProposal mockIncidentSummaryMcp() throws Exception { + + final MetadataChangeProposal mcp = new MetadataChangeProposal(); + mcp.setEntityUrn(TEST_DATASET_URN); + mcp.setEntityType(DATASET_ENTITY_NAME); + mcp.setAspectName(INCIDENTS_SUMMARY_ASPECT_NAME); + mcp.setChangeType(ChangeType.UPSERT); + mcp.setAspect(GenericRecordUtils.serializeAspect(mockIncidentSummary())); + + return mcp; + } +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentSource.pdl b/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentSource.pdl index 26fa7ed2f540a..2f8912da5458c 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentSource.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentSource.pdl @@ -22,18 +22,10 @@ record IncidentSource { * Manually created incident, via UI or API. */ MANUAL - - /** - * An assertion has failed, triggering the incident. - */ - ASSERTION_FAILURE } /** - * Reference to an URN related to the source of an incident. Currently populated when the type of the incident - * is ASSERTION_FAILURE. - * - * If this incident was raised due to an assertion failure, then this will contain the urn of the source Assertion. + * Reference to an URN related to the source of an incident. 
*/ @Searchable = { "fieldType": "URN" diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/incident/IncidentServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/incident/IncidentServiceFactory.java new file mode 100644 index 0000000000000..6fc9d6e1d7ad6 --- /dev/null +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/incident/IncidentServiceFactory.java @@ -0,0 +1,36 @@ +package com.linkedin.gms.factory.incident; + +import com.datahub.authentication.Authentication; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.gms.factory.auth.SystemAuthenticationFactory; +import com.linkedin.gms.factory.entity.RestliEntityClientFactory; +import com.linkedin.metadata.service.IncidentService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import javax.annotation.Nonnull; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.context.annotation.PropertySource; +import org.springframework.context.annotation.Scope; + +@Configuration +@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) +@Import({SystemAuthenticationFactory.class, RestliEntityClientFactory.class}) +public class IncidentServiceFactory { + @Autowired + @Qualifier("restliEntityClient") + private EntityClient _entityClient; + + @Autowired + @Qualifier("systemAuthentication") + private Authentication _authentication; + + @Bean(name = "incidentService") + @Scope("singleton") + @Nonnull + protected IncidentService getInstance() throws Exception { + return new IncidentService(_entityClient, _authentication); + } +} diff --git 
a/metadata-service/services/src/main/java/com/linkedin/metadata/service/IncidentService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/IncidentService.java new file mode 100644 index 0000000000000..7adf291fc199d --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/IncidentService.java @@ -0,0 +1,221 @@ +package com.linkedin.metadata.service; + +import com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.IncidentsSummary; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.SetMode; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.incident.IncidentSource; +import com.linkedin.incident.IncidentState; +import com.linkedin.incident.IncidentStatus; +import com.linkedin.incident.IncidentType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.entity.AspectUtils; +import com.linkedin.metadata.key.IncidentKey; +import com.linkedin.metadata.utils.EntityKeyUtils; +import java.util.List; +import java.util.Objects; +import java.util.UUID; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class IncidentService extends BaseService { + + public IncidentService( + @Nonnull final EntityClient entityClient, + @Nonnull final Authentication systemAuthentication) { + super(entityClient, systemAuthentication); + } + + /** + * Returns an instance of {@link IncidentInfo} for the specified Incident urn, or null if one + * cannot be found. + * + * @param incidentUrn the urn of the Incident + * @return an instance of {@link IncidentInfo} for the Incident, null if it does not exist. 
+ */ + @Nullable + public IncidentInfo getIncidentInfo(@Nonnull final Urn incidentUrn) { + Objects.requireNonNull(incidentUrn, "incidentUrn must not be null"); + final EntityResponse response = + getIncidentEntityResponse(incidentUrn, this.systemAuthentication); + if (response != null + && response.getAspects().containsKey(Constants.INCIDENT_INFO_ASPECT_NAME)) { + return new IncidentInfo( + response.getAspects().get(Constants.INCIDENT_INFO_ASPECT_NAME).getValue().data()); + } + // No aspect found + return null; + } + + /** + * Returns an instance of {@link IncidentsSummary} for the specified Entity urn, or null if one + * cannot be found. + * + * @param entityUrn the urn of the entity to retrieve the summary for + * @return an instance of {@link IncidentsSummary} for the Entity, null if it does not exist. + */ + @Nullable + public IncidentsSummary getIncidentsSummary(@Nonnull final Urn entityUrn) { + Objects.requireNonNull(entityUrn, "entityUrn must not be null"); + final EntityResponse response = + getIncidentsSummaryResponse(entityUrn, this.systemAuthentication); + if (response != null + && response.getAspects().containsKey(Constants.INCIDENTS_SUMMARY_ASPECT_NAME)) { + return new IncidentsSummary( + response.getAspects().get(Constants.INCIDENTS_SUMMARY_ASPECT_NAME).getValue().data()); + } + // No aspect found + return null; + } + + /** + * Produces a Metadata Change Proposal to update the IncidentsSummary aspect for a given entity. 
+ */ + public void updateIncidentsSummary( + @Nonnull final Urn entityUrn, @Nonnull final IncidentsSummary newSummary) throws Exception { + Objects.requireNonNull(entityUrn, "entityUrn must not be null"); + Objects.requireNonNull(newSummary, "newSummary must not be null"); + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + entityUrn, Constants.INCIDENTS_SUMMARY_ASPECT_NAME, newSummary), + this.systemAuthentication, + false); + } + + /** Deletes an incident with a given URN */ + public void deleteIncident(@Nonnull final Urn incidentUrn) throws Exception { + Objects.requireNonNull(incidentUrn, "incidentUrn must not be null"); + this.entityClient.deleteEntity(incidentUrn, this.systemAuthentication); + this.entityClient.deleteEntityReferences(incidentUrn, this.systemAuthentication); + } + + /** Updates an existing incident's status. */ + public Urn raiseIncident( + @Nonnull final IncidentType type, + @Nullable final String customType, + @Nullable final Integer priority, + @Nullable final String title, + @Nullable final String description, + @Nonnull final List entityUrns, + @Nullable final IncidentSource source, + @Nonnull final Urn actor, + @Nullable final String message) + throws Exception { + Objects.requireNonNull(type, "type must not be null"); + Objects.requireNonNull(entityUrns, "entityUrns must not be null"); + Objects.requireNonNull(actor, "actor must not be null"); + + final IncidentKey key = new IncidentKey(); + final String id = UUID.randomUUID().toString(); + key.setId(id); + final Urn urn = EntityKeyUtils.convertEntityKeyToUrn(key, Constants.INCIDENT_ENTITY_NAME); + + final IncidentInfo newInfo = new IncidentInfo(); + newInfo.setType(type); + newInfo.setCustomType(customType, SetMode.IGNORE_NULL); + newInfo.setPriority(priority, SetMode.IGNORE_NULL); + newInfo.setTitle(title, SetMode.IGNORE_NULL); + newInfo.setDescription(description, SetMode.IGNORE_NULL); + newInfo.setEntities(new UrnArray(entityUrns)); + 
newInfo.setSource(source, SetMode.IGNORE_NULL); + newInfo.setStatus( + new IncidentStatus() + .setState(IncidentState.ACTIVE) + .setMessage(message, SetMode.IGNORE_NULL) + .setLastUpdated(new AuditStamp().setActor(actor).setTime(System.currentTimeMillis()))); + newInfo.setCreated(new AuditStamp().setActor(actor).setTime(System.currentTimeMillis())); + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal(urn, Constants.INCIDENT_INFO_ASPECT_NAME, newInfo), + this.systemAuthentication, + false); + return urn; + } + + /** Updates an existing incident's status. */ + public void updateIncidentStatus( + @Nonnull final Urn urn, + @Nonnull final IncidentState state, + @Nonnull final Urn actor, + @Nullable final String message) + throws Exception { + Objects.requireNonNull(urn, "urn must not be null"); + Objects.requireNonNull(state, "state must not be null"); + Objects.requireNonNull(actor, "actor must not be null"); + final IncidentInfo existingInfo = getIncidentInfo(urn); + if (existingInfo != null) { + final IncidentStatus newStatus = + new IncidentStatus() + .setState(state) + .setLastUpdated(new AuditStamp().setActor(actor).setTime(System.currentTimeMillis())) + .setMessage(message, SetMode.IGNORE_NULL); + existingInfo.setStatus(newStatus); + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + urn, Constants.INCIDENT_INFO_ASPECT_NAME, existingInfo), + this.systemAuthentication, + false); + } else { + throw new IllegalArgumentException( + String.format("Failed to find incident with urn %s. Incident may not exist!", urn)); + } + } + + /** + * Returns an instance of {@link EntityResponse} for the specified View urn, or null if one cannot + * be found. + * + * @param incidentUrn the urn of the View + * @param authentication the authentication to use + * @return an instance of {@link EntityResponse} for the View, null if it does not exist. 
+ */ + @Nullable + private EntityResponse getIncidentEntityResponse( + @Nonnull final Urn incidentUrn, @Nonnull final Authentication authentication) { + Objects.requireNonNull(incidentUrn, "incidentUrn must not be null"); + Objects.requireNonNull(authentication, "authentication must not be null"); + try { + return this.entityClient.getV2( + Constants.INCIDENT_ENTITY_NAME, + incidentUrn, + ImmutableSet.of(Constants.INCIDENT_INFO_ASPECT_NAME), + authentication); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to retrieve Incident with urn %s", incidentUrn), e); + } + } + + /** + * Returns an instance of {@link EntityResponse} for the specified Entity urn containing the + * incidents summary aspect or null if one cannot be found. + * + * @param entityUrn the urn of the Entity for which to fetch incident summary + * @param authentication the authentication to use + * @return an instance of {@link EntityResponse} for the View, null if it does not exist. + */ + @Nullable + private EntityResponse getIncidentsSummaryResponse( + @Nonnull final Urn entityUrn, @Nonnull final Authentication authentication) { + Objects.requireNonNull(entityUrn, "entityUrn must not be null"); + Objects.requireNonNull(authentication, "authentication must not be null"); + try { + return this.entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(Constants.INCIDENTS_SUMMARY_ASPECT_NAME), + authentication); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to retrieve Incident Summary for entity with urn %s", entityUrn), + e); + } + } +} diff --git a/metadata-service/war/src/main/resources/boot/policies.json b/metadata-service/war/src/main/resources/boot/policies.json index 32e68e7b13343..84a3302d5638f 100644 --- a/metadata-service/war/src/main/resources/boot/policies.json +++ b/metadata-service/war/src/main/resources/boot/policies.json @@ -207,6 +207,7 @@ "EDIT_ENTITY_DATA_PRODUCTS", "EDIT_DEPRECATION_PRIVILEGE", 
"EDIT_ENTITY_ASSERTIONS", + "EDIT_ENTITY_INCIDENTS", "EDIT_ENTITY", "EDIT_DATASET_COL_TAGS", "EDIT_DATASET_COL_GLOSSARY_TERMS", @@ -286,6 +287,7 @@ "EDIT_ENTITY_DATA_PRODUCTS", "EDIT_DEPRECATION_PRIVILEGE", "EDIT_ENTITY_ASSERTIONS", + "EDIT_ENTITY_INCIDENTS", "EDIT_DATASET_COL_TAGS", "EDIT_DATASET_COL_GLOSSARY_TERMS", "EDIT_DATASET_COL_DESCRIPTION", @@ -430,6 +432,7 @@ "EDIT_ENTITY_DATA_PRODUCTS", "EDIT_DEPRECATION_PRIVILEGE", "EDIT_ENTITY_ASSERTIONS", + "EDIT_ENTITY_INCIDENTS", "EDIT_ENTITY", "EDIT_DATASET_COL_TAGS", "EDIT_DATASET_COL_GLOSSARY_TERMS", diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java index 8258a7d226ed6..7eb5f920958cf 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java @@ -201,6 +201,12 @@ public class PoliciesConfig { "Edit Operations", "The ability to report or edit operations information about an entity."); + public static final Privilege EDIT_ENTITY_INCIDENTS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_INCIDENTS", + "Edit Incidents", + "The ability to create and remove incidents for an entity."); + public static final Privilege EDIT_ENTITY_PRIVILEGE = Privilege.of( "EDIT_ENTITY", @@ -235,7 +241,8 @@ public class PoliciesConfig { EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE, EDIT_ENTITY_DEPRECATION_PRIVILEGE, EDIT_ENTITY_PRIVILEGE, - DELETE_ENTITY_PRIVILEGE); + DELETE_ENTITY_PRIVILEGE, + EDIT_ENTITY_INCIDENTS_PRIVILEGE); // Dataset Privileges public static final Privilege EDIT_DATASET_COL_TAGS_PRIVILEGE = From d0c2f8795779c633dfbc00e00339c1be54dfc4fe Mon Sep 17 00:00:00 2001 From: John Joyce Date: Tue, 23 Jan 2024 13:41:32 -0800 Subject: [PATCH 03/25] Adding incidents test --- smoke-test/tests/incidents/__init__.py | 0 smoke-test/tests/incidents/data.json | 167 ++++++++++++ 
smoke-test/tests/incidents/incidents_test.py | 254 +++++++++++++++++++ 3 files changed, 421 insertions(+) create mode 100644 smoke-test/tests/incidents/__init__.py create mode 100644 smoke-test/tests/incidents/data.json create mode 100644 smoke-test/tests/incidents/incidents_test.py diff --git a/smoke-test/tests/incidents/__init__.py b/smoke-test/tests/incidents/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/smoke-test/tests/incidents/data.json b/smoke-test/tests/incidents/data.json new file mode 100644 index 0000000000000..27fd87946a873 --- /dev/null +++ b/smoke-test/tests/incidents/data.json @@ -0,0 +1,167 @@ +[ + { + "auditHeader": null, + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:kafka,incidents-sample-dataset,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.BrowsePaths": { + "paths": ["/prod/kafka/SampleKafkaDataset"] + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "description": null, + "uri": null, + "tags": [], + "customProperties": { + "prop1": "fakeprop", + "prop2": "pikachu" + } + } + }, + { + "com.linkedin.pegasus2avro.common.Ownership": { + "owners": [ + { + "owner": "urn:li:corpuser:jdoe", + "type": "DATAOWNER", + "source": null + }, + { + "owner": "urn:li:corpuser:datahub", + "type": "DATAOWNER", + "source": null + } + ], + "lastModified": { + "time": 1581407189000, + "actor": "urn:li:corpuser:jdoe", + "impersonator": null + } + } + }, + { + "com.linkedin.pegasus2avro.common.InstitutionalMemory": { + "elements": [ + { + "url": "https://www.linkedin.com", + "description": "Sample doc", + "createStamp": { + "time": 1581407189000, + "actor": "urn:li:corpuser:jdoe", + "impersonator": null + } + } + ] + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "SampleKafkaSchema", + "platform": "urn:li:dataPlatform:kafka", + "version": 0, + "created": { + "time": 
1581407189000, + "actor": "urn:li:corpuser:jdoe", + "impersonator": null + }, + "lastModified": { + "time": 1581407189000, + "actor": "urn:li:corpuser:jdoe", + "impersonator": null + }, + "deleted": null, + "dataset": null, + "cluster": null, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.KafkaSchema": { + "documentSchema": "{\"type\":\"record\",\"name\":\"SampleKafkaSchema\",\"namespace\":\"com.linkedin.dataset\",\"doc\":\"Sample Kafka dataset\",\"fields\":[{\"name\":\"field_foo\",\"type\":[\"string\"]},{\"name\":\"field_bar\",\"type\":[\"boolean\"]}]}" + } + }, + "fields": [ + { + "fieldPath": "[version=2.0].[type=boolean].field_foo_2", + "jsonPath": null, + "nullable": false, + "description": { + "string": "Foo field description" + }, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.BooleanType": {} + } + }, + "nativeDataType": "varchar(100)", + "globalTags": { + "tags": [{ "tag": "urn:li:tag:NeedsDocumentation" }] + }, + "recursive": false + }, + { + "fieldPath": "[version=2.0].[type=boolean].field_bar", + "jsonPath": null, + "nullable": false, + "description": { + "string": "Bar field description" + }, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.BooleanType": {} + } + }, + "nativeDataType": "boolean", + "recursive": false + }, + { + "fieldPath": "[version=2.0].[key=True].[type=int].id", + "jsonPath": null, + "nullable": false, + "description": { + "string": "Id specifying which partition the message should go to" + }, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.BooleanType": {} + } + }, + "nativeDataType": "boolean", + "recursive": false + } + ], + "primaryKeys": null, + "foreignKeysSpecs": null + } + } + ] + } + }, + "proposedDelta": null + }, + { + "auditHeader":null, + "entityType":"incident", + "entityUrn": "urn:li:incident:test", + "changeType":"UPSERT", + "aspectName":"incidentInfo", + "aspect":{ + "value":"{\"type\": \"OPERATIONAL\", \"title\": \"test title\", \"description\": \"test 
description\", \"entities\": [\"urn:li:dataset:(urn:li:dataPlatform:kafka,incidents-sample-dataset,PROD)\"], \"status\": { \"state\": \"ACTIVE\", \"lastUpdated\": { \"time\": 0, \"actor\": \"urn:li:corpuser:admin\" } }, \"source\": { \"type\": \"ASSERTION_FAILURE\", \"sourceUrn\": \"urn:li:assertion:assertion-test\"}, \"created\": { \"time\": 0, \"actor\": \"urn:li:corpuser:admin\" } }", + "contentType":"application/json" + }, + "systemMetadata":null + }, + { + "auditHeader":null, + "entityType":"assertion", + "entityUrn": "urn:li:assertion:assertion-test", + "changeType":"UPSERT", + "aspectName":"assertionInfo", + "aspect":{ + "value":"{\"type\": \"DATASET\" }", + "contentType":"application/json" + }, + "systemMetadata":null + } +] \ No newline at end of file diff --git a/smoke-test/tests/incidents/incidents_test.py b/smoke-test/tests/incidents/incidents_test.py new file mode 100644 index 0000000000000..627ec9e0b6710 --- /dev/null +++ b/smoke-test/tests/incidents/incidents_test.py @@ -0,0 +1,254 @@ +import time + +import pytest + +from tests.utils import (delete_urns_from_file, get_frontend_url, get_gms_url, + ingest_file_via_rest) + + +@pytest.fixture(scope="module", autouse=True) +def ingest_cleanup_data(request): + print("ingesting incidents test data") + ingest_file_via_rest("tests/incidents/data.json") + yield + print("removing incidents test data") + delete_urns_from_file("tests/incidents/data.json") + + +@pytest.mark.dependency() +def test_healthchecks(wait_for_healthchecks): + # Call to wait_for_healthchecks fixture will do the actual functionality. + pass + + +TEST_DATASET_URN = ( + "urn:li:dataset:(urn:li:dataPlatform:kafka,incidents-sample-dataset,PROD)" +) +TEST_INCIDENT_URN = "urn:li:incident:test" + + +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_list_dataset_incidents(frontend_session): + + # Sleep for eventual consistency (not ideal) + time.sleep(2) + + list_dataset_incidents_json = { + "query": """query dataset($urn: String!) 
{\n + dataset(urn: $urn) {\n + incidents(state: ACTIVE, start: 0, count: 10) {\n + start\n + count\n + total\n + incidents {\n + urn\n + type\n + incidentType\n + title\n + description\n + status {\n + state\n + message\n + lastUpdated {\n + time\n + actor\n + }\n + }\n + source {\n + type\n + source {\n + ... on Assertion {\n + urn\n + info {\n + type + }\n + }\n + }\n + }\n + entity {\n + urn\n + }\n + created {\n + time\n + actor\n + }\n + }\n + }\n + }\n + }""", + "variables": {"urn": TEST_DATASET_URN}, + } + + response = frontend_session.post( + f"{get_frontend_url()}/api/v2/graphql", json=list_dataset_incidents_json + ) + response.raise_for_status() + res_data = response.json() + + assert res_data + assert "errors" not in res_data + assert res_data["data"] + assert res_data["data"]["dataset"]["incidents"] == { + "start": 0, + "count": 10, + "total": 1, + "incidents": [ + { + "urn": TEST_INCIDENT_URN, + "type": "INCIDENT", + "incidentType": "OPERATIONAL", + "title": "test title", + "description": "test description", + "status": { + "state": "ACTIVE", + "message": None, + "lastUpdated": {"time": 0, "actor": "urn:li:corpuser:admin"}, + }, + "source": { + "type": "ASSERTION_FAILURE", + "source": { + "urn": "urn:li:assertion:assertion-test", + "info": {"type": "DATASET"}, + }, + }, + "entity": {"urn": TEST_DATASET_URN}, + "created": {"time": 0, "actor": "urn:li:corpuser:admin"}, + } + ], + } + + +@pytest.mark.dependency( + depends=[ + "test_healthchecks", + "test_list_dataset_incidents", + "test_search_all_incidents", + ] +) +def test_raise_resolve_incident(frontend_session): + + # Raise new incident + raise_incident_json = { + "query": """mutation raiseIncident($input: RaiseIncidentInput!) 
{\n + raiseIncident(input: $input) + }""", + "variables": { + "input": { + "type": "OPERATIONAL", + "title": "test title 2", + "description": "test description 2", + "resourceUrn": TEST_DATASET_URN, + "priority": 0, + } + }, + } + + response = frontend_session.post( + f"{get_frontend_url()}/api/v2/graphql", json=raise_incident_json + ) + response.raise_for_status() + res_data = response.json() + + assert res_data + assert "errors" not in res_data + assert res_data["data"] + assert res_data["data"]["raiseIncident"] is not None + + new_incident_urn = res_data["data"]["raiseIncident"] + + # Resolve the incident. + update_incident_status = { + "query": """mutation updateIncidentStatus($urn: String!, $input: UpdateIncidentStatusInput!) {\n + updateIncidentStatus(urn: $urn, input: $input) + }""", + "variables": { + "urn": new_incident_urn, + "input": { + "state": "RESOLVED", + "message": "test message 2", + }, + }, + } + + response = frontend_session.post( + f"{get_frontend_url()}/api/v2/graphql", json=update_incident_status + ) + response.raise_for_status() + res_data = response.json() + + assert res_data + assert "errors" not in res_data + assert res_data["data"] + assert res_data["data"]["updateIncidentStatus"] is True + + # Sleep for eventual consistency (not ideal) + time.sleep(2) + + # Fetch the dataset's incidents to confirm there's a resolved incident.new_incident_urn + list_dataset_incidents_json = { + "query": """query dataset($urn: String!) 
{\n + dataset(urn: $urn) {\n + incidents(state: RESOLVED, start: 0, count: 10) {\n + start\n + count\n + total\n + incidents {\n + urn\n + type\n + incidentType\n + title\n + description\n + priority\n + status {\n + state\n + message\n + lastUpdated {\n + time\n + actor\n + }\n + }\n + entity {\n + urn\n + }\n + created {\n + time\n + actor\n + }\n + }\n + }\n + }\n + }""", + "variables": {"urn": TEST_DATASET_URN}, + } + + response = frontend_session.post( + f"{get_frontend_url()}/api/v2/graphql", json=list_dataset_incidents_json + ) + response.raise_for_status() + res_data = response.json() + + assert res_data + assert res_data["data"] + assert res_data["data"]["dataset"]["incidents"]["total"] is not None + assert "errors" not in res_data + + # Find the new incident and do the comparison. + active_incidents = res_data["data"]["dataset"]["incidents"]["incidents"] + filtered_incidents = list( + filter(lambda incident: incident["urn"] == new_incident_urn, active_incidents) + ) + assert len(filtered_incidents) == 1 + new_incident = filtered_incidents[0] + assert new_incident["title"] == "test title 2" + assert new_incident["description"] == "test description 2" + assert new_incident["status"]["state"] == "RESOLVED" + assert new_incident["priority"] == 0 + + delete_json = {"urn": new_incident_urn} + + # Cleanup: Delete the incident + response = frontend_session.post( + f"{get_gms_url()}/entities?action=delete", json=delete_json + ) + + response.raise_for_status() From c58e2fd7a431155604710aed02480f84c207d2bb Mon Sep 17 00:00:00 2001 From: John Joyce Date: Tue, 23 Jan 2024 14:15:12 -0800 Subject: [PATCH 04/25] Add support for Entity Health for Datasets, Dashboards, Charts, Data Jobs, Data Flows --- .../datahub/graphql/GmsGraphQLEngine.java | 45 ++- .../health/EntityHealthResolver.java | 328 ++++++++++++++++++ .../src/main/resources/entity.graphql | 27 +- .../src/main/resources/incident.graphql | 28 -- .../EntityHealthResolverTest.java} | 6 +- 
.../incident/EntityIncidentsResolverTest.java | 6 +- .../types/incident/IncidentMapperTest.java | 8 +- .../types/incident/IncidentTypeTest.java | 4 +- .../com/linkedin/incident/IncidentType.pdl | 24 -- 9 files changed, 405 insertions(+), 71 deletions(-) create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolver.java rename datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/{dataset/DatasetHealthResolverTest.java => health/EntityHealthResolverTest.java} (97%) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index 3f1caaaf9fdfd..33c0e6a0a5241 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -126,7 +126,6 @@ import com.linkedin.datahub.graphql.resolvers.dataproduct.DeleteDataProductResolver; import com.linkedin.datahub.graphql.resolvers.dataproduct.ListDataProductAssetsResolver; import com.linkedin.datahub.graphql.resolvers.dataproduct.UpdateDataProductResolver; -import com.linkedin.datahub.graphql.resolvers.dataset.DatasetHealthResolver; import com.linkedin.datahub.graphql.resolvers.dataset.DatasetStatsSummaryResolver; import com.linkedin.datahub.graphql.resolvers.dataset.DatasetUsageStatsResolver; import com.linkedin.datahub.graphql.resolvers.deprecation.UpdateDeprecationResolver; @@ -159,6 +158,7 @@ import com.linkedin.datahub.graphql.resolvers.group.ListGroupsResolver; import com.linkedin.datahub.graphql.resolvers.group.RemoveGroupMembersResolver; import com.linkedin.datahub.graphql.resolvers.group.RemoveGroupResolver; +import com.linkedin.datahub.graphql.resolvers.health.EntityHealthResolver; import com.linkedin.datahub.graphql.resolvers.incident.EntityIncidentsResolver; import 
com.linkedin.datahub.graphql.resolvers.ingest.execution.CancelIngestionExecutionRequestResolver; import com.linkedin.datahub.graphql.resolvers.ingest.execution.CreateIngestionExecutionRequestResolver; @@ -1493,7 +1493,12 @@ private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("usageStats", new DatasetUsageStatsResolver(this.usageClient)) .dataFetcher("statsSummary", new DatasetStatsSummaryResolver(this.usageClient)) .dataFetcher( - "health", new DatasetHealthResolver(graphClient, timeseriesAspectService)) + "health", + new EntityHealthResolver( + entityClient, + graphClient, + timeseriesAspectService, + new EntityHealthResolver.Config(true, true))) .dataFetcher("schemaMetadata", new AspectResolver()) .dataFetcher( "assertions", new EntityAssertionsResolver(entityClient, graphClient)) @@ -1842,7 +1847,14 @@ private void configureDashboardResolvers(final RuntimeWiring.Builder builder) { .dataFetcher( "statsSummary", new DashboardStatsSummaryResolver(timeseriesAspectService)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService))); + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "health", + new EntityHealthResolver( + entityClient, + graphClient, + timeseriesAspectService, + new EntityHealthResolver.Config(false, true)))); builder.type( "DashboardInfo", typeWiring -> @@ -1959,7 +1971,14 @@ private void configureChartResolvers(final RuntimeWiring.Builder builder) { .dataFetcher( "statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService))); + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "health", + new EntityHealthResolver( + entityClient, + graphClient, + timeseriesAspectService, + new EntityHealthResolver.Config(false, 
true)))); builder.type( "ChartInfo", typeWiring -> @@ -2064,7 +2083,14 @@ private void configureDataJobResolvers(final RuntimeWiring.Builder builder) { })) .dataFetcher("runs", new DataJobRunsResolver(entityClient)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService))) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "health", + new EntityHealthResolver( + entityClient, + graphClient, + timeseriesAspectService, + new EntityHealthResolver.Config(false, true)))) .type( "DataJobInputOutput", typeWiring -> @@ -2127,7 +2153,14 @@ private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) { return dataFlow.getDataPlatformInstance() != null ? dataFlow.getDataPlatformInstance().getUrn() : null; - }))); + })) + .dataFetcher( + "health", + new EntityHealthResolver( + entityClient, + graphClient, + timeseriesAspectService, + new EntityHealthResolver.Config(false, true)))); } /** diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolver.java new file mode 100644 index 0000000000000..c8da9558fefeb --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolver.java @@ -0,0 +1,328 @@ +package com.linkedin.datahub.graphql.resolvers.health; + +import com.google.common.cache.Cache; +import com.google.common.cache.CacheBuilder; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.EntityRelationships; +import com.linkedin.data.template.StringArray; +import com.linkedin.data.template.StringArrayArray; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.Health; +import 
com.linkedin.datahub.graphql.generated.HealthStatus; +import com.linkedin.datahub.graphql.generated.HealthStatusType; +import com.linkedin.datahub.graphql.generated.IncidentState; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.graph.GraphClient; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.query.filter.RelationshipDirection; +import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.search.utils.QueryUtils; +import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import com.linkedin.r2.RemoteInvocationException; +import com.linkedin.timeseries.AggregationSpec; +import com.linkedin.timeseries.AggregationType; +import com.linkedin.timeseries.GenericTable; +import com.linkedin.timeseries.GroupingBucket; +import com.linkedin.timeseries.GroupingBucketType; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.extern.slf4j.Slf4j; + +/** + * Resolver for generating the health badge for an asset, which depends on + * + *

1. Assertions status - whether the asset has active assertions 2. Incidents status - whether + * the asset has active incidents + */ +@Slf4j +public class EntityHealthResolver implements DataFetcher>> { + private static final String ASSERTS_RELATIONSHIP_NAME = "Asserts"; + private static final String ASSERTION_RUN_EVENT_SUCCESS_TYPE = "SUCCESS"; + private static final String INCIDENT_ENTITIES_SEARCH_INDEX_FIELD_NAME = "entities.keyword"; + private static final String INCIDENT_STATE_SEARCH_INDEX_FIELD_NAME = "state"; + + private final EntityClient _entityClient; + private final GraphClient _graphClient; + private final TimeseriesAspectService _timeseriesAspectService; + + private final Config _config; + + private final Cache _statusCache; + + public EntityHealthResolver( + @Nonnull final EntityClient entityClient, + @Nonnull final GraphClient graphClient, + @Nonnull final TimeseriesAspectService timeseriesAspectService) { + this(entityClient, graphClient, timeseriesAspectService, new Config(true, true)); + } + + public EntityHealthResolver( + @Nonnull final EntityClient entityClient, + @Nonnull final GraphClient graphClient, + @Nonnull final TimeseriesAspectService timeseriesAspectService, + @Nonnull final Config config) { + _entityClient = entityClient; + _graphClient = graphClient; + _timeseriesAspectService = timeseriesAspectService; + _statusCache = + CacheBuilder.newBuilder().maximumSize(0).expireAfterWrite(1, TimeUnit.MINUTES).build(); + _config = config; + } + + @Override + public CompletableFuture> get(final DataFetchingEnvironment environment) + throws Exception { + final Entity parent = environment.getSource(); + return CompletableFuture.supplyAsync( + () -> { + try { + final CachedHealth cachedStatus = + _statusCache.get( + parent.getUrn(), + () -> (computeHealthStatusForAsset(parent.getUrn(), environment.getContext()))); + return cachedStatus.healths; + } catch (Exception e) { + throw new RuntimeException("Failed to resolve asset's health status.", e); + 
} + }); + } + + /** + * Computes the "resolved health status" for an asset by + * + *

- fetching active (non-deleted) assertions - fetching latest assertion run for each - + * checking whether any of the assertions latest runs are failing + */ + private CachedHealth computeHealthStatusForAsset( + final String entityUrn, final QueryContext context) { + final List healthStatuses = new ArrayList<>(); + + if (_config.getIncidentsEnabled()) { + final Health incidentsHealth = computeIncidentsHealthForAsset(entityUrn, context); + if (incidentsHealth != null) { + healthStatuses.add(incidentsHealth); + } + } + + if (_config.getAssertionsEnabled()) { + final Health assertionsHealth = computeAssertionHealthForAsset(entityUrn, context); + if (assertionsHealth != null) { + healthStatuses.add(assertionsHealth); + } + } + + return new CachedHealth(healthStatuses); + } + + /** + * Returns the resolved "incidents health", which is currently a static function of whether there + * are any active incidents open on an asset + * + * @param entityUrn the asset to compute health for + * @param context the query context + * @return an instance of {@link Health} for the entity, null if one cannot be computed. + */ + private Health computeIncidentsHealthForAsset( + final String entityUrn, final QueryContext context) { + try { + final Filter filter = buildIncidentsEntityFilter(entityUrn, IncidentState.ACTIVE.toString()); + final SearchResult searchResult = + _entityClient.filter( + Constants.INCIDENT_ENTITY_NAME, filter, null, 0, 1, context.getAuthentication()); + final Integer activeIncidentCount = searchResult.getNumEntities(); + if (activeIncidentCount > 0) { + // There are active incidents. + return new Health( + HealthStatusType.INCIDENTS, + HealthStatus.FAIL, + String.format( + "%s active incident%s", activeIncidentCount, activeIncidentCount > 1 ? "s" : ""), + ImmutableList.of("ACTIVE_INCIDENTS")); + } + // Report pass if there are no active incidents. 
+ return new Health(HealthStatusType.INCIDENTS, HealthStatus.PASS, null, null); + } catch (RemoteInvocationException e) { + log.error("Failed to compute incident health status!", e); + return null; + } + } + + private Filter buildIncidentsEntityFilter(final String entityUrn, final String state) { + final Map criterionMap = new HashMap<>(); + criterionMap.put(INCIDENT_ENTITIES_SEARCH_INDEX_FIELD_NAME, entityUrn); + criterionMap.put(INCIDENT_STATE_SEARCH_INDEX_FIELD_NAME, state); + return QueryUtils.newFilter(criterionMap); + } + + /** + * TODO: Replace this with the assertions summary aspect. + * + *

Returns the resolved "assertions health", which is currently a static function of whether + * the most recent run of all asset assertions has succeeded. + * + * @param entityUrn the entity to compute health for + * @param context the query context + * @return an instance of {@link Health} for the asset, null if one cannot be computed. + */ + @Nullable + private Health computeAssertionHealthForAsset( + final String entityUrn, final QueryContext context) { + // Get active assertion urns + final EntityRelationships relationships = + _graphClient.getRelatedEntities( + entityUrn, + ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), + RelationshipDirection.INCOMING, + 0, + 500, + context.getActorUrn()); + + if (relationships.getTotal() > 0) { + + // If there are assertions defined, then we should return a non-null health for this asset. + final Set activeAssertionUrns = + relationships.getRelationships().stream() + .map(relationship -> relationship.getEntity().toString()) + .collect(Collectors.toSet()); + + final GenericTable assertionRunResults = getAssertionRunsTable(entityUrn); + + if (!assertionRunResults.hasRows() || assertionRunResults.getRows().size() == 0) { + // No assertion run results found. Return empty health! + return null; + } + + final List failingAssertionUrns = + getFailingAssertionUrns(assertionRunResults, activeAssertionUrns); + + // Finally compute & return the health. 
+ final Health health = new Health(); + health.setType(HealthStatusType.ASSERTIONS); + if (failingAssertionUrns.size() > 0) { + health.setStatus(HealthStatus.FAIL); + health.setMessage( + String.format( + "%s of %s assertions are failing", + failingAssertionUrns.size(), activeAssertionUrns.size())); + health.setCauses(failingAssertionUrns); + } else { + health.setStatus(HealthStatus.PASS); + health.setMessage("All assertions are passing"); + } + return health; + } + return null; + } + + private GenericTable getAssertionRunsTable(final String asserteeUrn) { + return _timeseriesAspectService.getAggregatedStats( + Constants.ASSERTION_ENTITY_NAME, + Constants.ASSERTION_RUN_EVENT_ASPECT_NAME, + createAssertionAggregationSpecs(), + createAssertionsFilter(asserteeUrn), + createAssertionGroupingBuckets()); + } + + private List getFailingAssertionUrns( + final GenericTable assertionRunsResult, final Set candidateAssertionUrns) { + // Create the buckets based on the result + return resultToFailedAssertionUrns(assertionRunsResult.getRows(), candidateAssertionUrns); + } + + private Filter createAssertionsFilter(final String datasetUrn) { + final Filter filter = new Filter(); + final ArrayList criteria = new ArrayList<>(); + + // Add filter for asserteeUrn == datasetUrn + Criterion datasetUrnCriterion = + new Criterion().setField("asserteeUrn").setCondition(Condition.EQUAL).setValue(datasetUrn); + criteria.add(datasetUrnCriterion); + + // Add filter for result == result + Criterion startTimeCriterion = + new Criterion() + .setField("status") + .setCondition(Condition.EQUAL) + .setValue(Constants.ASSERTION_RUN_EVENT_STATUS_COMPLETE); + criteria.add(startTimeCriterion); + + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); + return filter; + } + + private AggregationSpec[] createAssertionAggregationSpecs() { + // Simply fetch the timestamp, result type for the assertion URN. 
+ AggregationSpec resultTypeAggregation = + new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("type"); + AggregationSpec timestampAggregation = + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("timestampMillis"); + return new AggregationSpec[] {resultTypeAggregation, timestampAggregation}; + } + + private GroupingBucket[] createAssertionGroupingBuckets() { + // String grouping bucket on "assertionUrn" + GroupingBucket assertionUrnBucket = new GroupingBucket(); + assertionUrnBucket.setKey("assertionUrn").setType(GroupingBucketType.STRING_GROUPING_BUCKET); + return new GroupingBucket[] {assertionUrnBucket}; + } + + private List resultToFailedAssertionUrns( + final StringArrayArray rows, final Set activeAssertionUrns) { + final List failedAssertionUrns = new ArrayList<>(); + for (StringArray row : rows) { + // Result structure should be assertionUrn, event.result.type, timestampMillis + if (row.size() != 3) { + throw new RuntimeException( + String.format( + "Failed to fetch assertion run events from Timeseries index! Expected row of size 3, found %s", + row.size())); + } + + final String assertionUrn = row.get(0); + final String resultType = row.get(1); + + // If assertion is "active" (not deleted) & is failing, then we report a degradation in + // health. 
+ if (activeAssertionUrns.contains(assertionUrn) + && !ASSERTION_RUN_EVENT_SUCCESS_TYPE.equals(resultType)) { + failedAssertionUrns.add(assertionUrn); + } + } + return failedAssertionUrns; + } + + @Data + @AllArgsConstructor + public static class Config { + private Boolean assertionsEnabled; + private Boolean incidentsEnabled; + } + + @AllArgsConstructor + private static class CachedHealth { + private final List healths; + } +} diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index 2413cadfbb210..56d912cdb4ad2 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -1702,7 +1702,7 @@ type VersionedDataset implements Entity { domain: DomainAssociation """ - Experimental! The resolved health status of the Dataset + Experimental! The resolved health status of the asset """ health: [Health!] @@ -5135,6 +5135,11 @@ type Dashboard implements EntityWithRelationships & Entity & BrowsableEntity { Structured properties about this asset """ structuredProperties: StructuredProperties + + """ + Experimental! The resolved health statuses of the asset + """ + health: [Health!] } """ @@ -5456,6 +5461,11 @@ type Chart implements EntityWithRelationships & Entity & BrowsableEntity { Structured properties about this asset """ structuredProperties: StructuredProperties + + """ + Experimental! The resolved health statuses of the asset + """ + health: [Health!] } """ @@ -5824,6 +5834,11 @@ type DataFlow implements EntityWithRelationships & Entity & BrowsableEntity { Structured properties about this asset """ structuredProperties: StructuredProperties + + """ + Experimental! The resolved health statuses of the asset + """ + health: [Health!] 
} """ @@ -6035,6 +6050,11 @@ type DataJob implements EntityWithRelationships & Entity & BrowsableEntity { Structured properties about this asset """ structuredProperties: StructuredProperties + + """ + Experimental! The resolved health statuses of the asset + """ + health: [Health!] } """ @@ -10215,6 +10235,11 @@ enum HealthStatusType { Assertions status """ ASSERTIONS + + """ + Incidents status + """ + INCIDENTS } """ diff --git a/datahub-graphql-core/src/main/resources/incident.graphql b/datahub-graphql-core/src/main/resources/incident.graphql index 13d1793739c15..c3f4f35be608d 100644 --- a/datahub-graphql-core/src/main/resources/incident.graphql +++ b/datahub-graphql-core/src/main/resources/incident.graphql @@ -131,30 +131,6 @@ enum IncidentState { A specific type of incident """ enum IncidentType { - """ - A Freshness Assertion has failed, triggering the incident. - Raised on assets where assertions are configured to generate incidents. - """ - FRESHNESS - - """ - A Volume Assertion has failed, triggering the incident. - Raised on assets where assertions are configured to generate incidents. - """ - VOLUME - - """ - An assertion on a particular column(s) of a Dataset has triggered the incident. - Raised on Datasets where assertions are configured to generate incidents. - """ - DATASET_COLUMN - - """ - An assertion on the row count of a Dataset has triggered the incident. - Raised on datasets where assertions are configured to generate incidents. - """ - DATASET_ROWS - """ An operational incident, e.g. failure to materialize a dataset, or failure to execute a task / pipeline. """ @@ -193,10 +169,6 @@ enum IncidentSourceType { The incident was created manually, from either the API or the UI. """ MANUAL - """ - An assertion has failed, triggering the incident. 
- """ - ASSERTION_FAILURE } """ diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolverTest.java similarity index 97% rename from datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java rename to datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolverTest.java index 3ff0120448e54..2129821e0d95f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolverTest.java @@ -1,4 +1,4 @@ -package com.linkedin.datahub.graphql.resolvers.dataset; +package com.linkedin.datahub.graphql.resolvers.health; import static org.testng.Assert.*; @@ -14,6 +14,7 @@ import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.Health; import com.linkedin.datahub.graphql.generated.HealthStatus; +import com.linkedin.datahub.graphql.resolvers.dataset.DatasetHealthResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.graph.GraphClient; import com.linkedin.metadata.query.filter.RelationshipDirection; @@ -25,7 +26,8 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -public class DatasetHealthResolverTest { +// TODO: Update this test once assertions summary has been added. 
+public class EntityHealthResolverTest { private static final String TEST_DATASET_URN = "urn:li:dataset:(test,test,test)"; private static final String TEST_ASSERTION_URN = "urn:li:assertion:test-guid"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java index 41e9458243a07..a3f4b508dfc3e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java @@ -57,16 +57,14 @@ public void testGetSuccess() throws Exception { IncidentInfo expectedInfo = new IncidentInfo() - .setType(IncidentType.DATASET_COLUMN) + .setType(IncidentType.OPERATIONAL) .setCustomType("Custom Type") .setDescription("Description") .setPriority(5) .setTitle("Title") .setEntities(new UrnArray(ImmutableList.of(datasetUrn))) .setSource( - new IncidentSource() - .setType(IncidentSourceType.ASSERTION_FAILURE) - .setSourceUrn(assertionUrn)) + new IncidentSource().setType(IncidentSourceType.MANUAL).setSourceUrn(assertionUrn)) .setStatus( new IncidentStatus() .setState(IncidentState.ACTIVE) diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentMapperTest.java index f0586929a39f1..d637f873533ef 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentMapperTest.java @@ -35,7 +35,7 @@ public void testMap() throws Exception { EnvelopedAspect envelopedIncidentInfo = new EnvelopedAspect(); IncidentInfo incidentInfo = new IncidentInfo(); - 
incidentInfo.setType(IncidentType.DATASET_COLUMN); + incidentInfo.setType(IncidentType.OPERATIONAL); incidentInfo.setCustomType("Custom Type"); incidentInfo.setTitle("Test Incident", SetMode.IGNORE_NULL); incidentInfo.setDescription("This is a test incident", SetMode.IGNORE_NULL); @@ -43,7 +43,7 @@ public void testMap() throws Exception { incidentInfo.setEntities(new UrnArray(Collections.singletonList(urn))); IncidentSource source = new IncidentSource(); - source.setType(IncidentSourceType.ASSERTION_FAILURE); + source.setType(IncidentSourceType.MANUAL); source.setSourceUrn(assertionUrn); incidentInfo.setSource(source); @@ -76,13 +76,13 @@ public void testMap() throws Exception { assertEquals(incident.getCustomType(), "Custom Type"); assertEquals( incident.getIncidentType().toString(), - com.linkedin.datahub.graphql.generated.IncidentType.DATASET_COLUMN.toString()); + com.linkedin.datahub.graphql.generated.IncidentType.OPERATIONAL.toString()); assertEquals(incident.getTitle(), "Test Incident"); assertEquals(incident.getDescription(), "This is a test incident"); assertEquals(incident.getPriority().intValue(), 1); assertEquals( incident.getSource().getType().toString(), - com.linkedin.datahub.graphql.generated.IncidentSourceType.ASSERTION_FAILURE.toString()); + com.linkedin.datahub.graphql.generated.IncidentSourceType.MANUAL.toString()); assertEquals(incident.getSource().getSource().getUrn(), assertionUrn.toString()); assertEquals( incident.getStatus().getState().toString(), diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentTypeTest.java index 219339e1d65b8..ad787f29e8b2a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentTypeTest.java @@ -54,7 +54,7 @@ public 
class IncidentTypeTest { private static final IncidentKey TEST_INCIDENT_KEY = new IncidentKey().setId("guid-1"); private static final IncidentInfo TEST_INCIDENT_INFO = new IncidentInfo() - .setType(IncidentType.DATASET_COLUMN) + .setType(IncidentType.OPERATIONAL) .setCustomType("Custom Type") .setDescription("Description") .setPriority(5) @@ -62,7 +62,7 @@ public class IncidentTypeTest { .setEntities(new UrnArray(ImmutableList.of(testDatasetUrn))) .setSource( new IncidentSource() - .setType(IncidentSourceType.ASSERTION_FAILURE) + .setType(IncidentSourceType.MANUAL) .setSourceUrn(testAssertionUrn)) .setStatus( new IncidentStatus() diff --git a/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentType.pdl b/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentType.pdl index 56de7a145f542..27c4790e3b6ef 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentType.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/incident/IncidentType.pdl @@ -4,30 +4,6 @@ namespace com.linkedin.incident * A type of asset incident */ enum IncidentType { - /** - * An Freshness Assertion has failed, triggering the incident. - * Raised on entities where assertions are configured to generate incidents. - */ - FRESHNESS - - /** - * An Volume Assertion has failed, triggering the incident. - * Raised on entities where assertions are configured to generate incidents. - */ - VOLUME - - /** - * An assertion on a particular column(s) of a Dataset has triggered the incident. - * Raised on Datasets where assertions are configured to generate incidents. - */ - DATASET_COLUMN - - /** - * An assertion on the row count of a Dataset has triggered the incident. - * Raised on datasets where assertions are configured to generate incidents. - */ - DATASET_ROWS - /** * A misc. operational incident, e.g. failure to materialize a dataset. 
*/ From 4e70723887d0b477b6ea183e98f548d6eb8c87bd Mon Sep 17 00:00:00 2001 From: John Joyce Date: Tue, 23 Jan 2024 14:18:47 -0800 Subject: [PATCH 05/25] Remove legacy incidents doc reference from docs website --- docs/managed-datahub/managed-datahub-overview.md | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/managed-datahub/managed-datahub-overview.md b/docs/managed-datahub/managed-datahub-overview.md index dc3d454c82d95..79ffc2ce98451 100644 --- a/docs/managed-datahub/managed-datahub-overview.md +++ b/docs/managed-datahub/managed-datahub-overview.md @@ -20,7 +20,6 @@ Acryl DataHub offers a slew of additional features on top of the normal OSS proj ## Expanded API Features - [Entity Events API](docs/managed-datahub/datahub-api/entity-events-api.md) -- [Incidents API](docs/managed-datahub/datahub-api/graphql-api/incidents-api-beta.md) ## More Ways to Act on Metadata From 7030eb3209f2584a10f8bc5687566837774a269d Mon Sep 17 00:00:00 2001 From: John Joyce Date: Tue, 23 Jan 2024 15:32:40 -0800 Subject: [PATCH 06/25] Add raiseIncident and updateIncidentStatus back into GMS GraphQL Engine properly --- .../com/linkedin/datahub/graphql/GmsGraphQLEngine.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index 33c0e6a0a5241..8bc3391b6e402 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -160,6 +160,8 @@ import com.linkedin.datahub.graphql.resolvers.group.RemoveGroupResolver; import com.linkedin.datahub.graphql.resolvers.health.EntityHealthResolver; import com.linkedin.datahub.graphql.resolvers.incident.EntityIncidentsResolver; +import com.linkedin.datahub.graphql.resolvers.incident.RaiseIncidentResolver; +import 
com.linkedin.datahub.graphql.resolvers.incident.UpdateIncidentStatusResolver; import com.linkedin.datahub.graphql.resolvers.ingest.execution.CancelIngestionExecutionRequestResolver; import com.linkedin.datahub.graphql.resolvers.ingest.execution.CreateIngestionExecutionRequestResolver; import com.linkedin.datahub.graphql.resolvers.ingest.execution.CreateTestConnectionRequestResolver; @@ -1210,7 +1212,11 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) { "createDynamicFormAssignment", new CreateDynamicFormAssignmentResolver(this.formService)) .dataFetcher( - "verifyForm", new VerifyFormResolver(this.formService, this.groupService))); + "verifyForm", new VerifyFormResolver(this.formService, this.groupService)) + .dataFetcher("raiseIncident", new RaiseIncidentResolver(this.entityClient)) + .dataFetcher( + "updateIncidentStatus", + new UpdateIncidentStatusResolver(this.entityClient, this.entityService))); } private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder) { From a9348fc1c2d0fdb24954dd5f921d553a1093379c Mon Sep 17 00:00:00 2001 From: John Joyce Date: Tue, 23 Jan 2024 15:33:44 -0800 Subject: [PATCH 07/25] Adding the Incidents Status Hook to OSS for use --- .../service/IncidentsSummaryUtilsTest.java | 309 ++++++++++++++++++ .../kafka/MaeConsumerApplication.java | 1 + .../kafka/MetadataChangeLogProcessor.java | 4 +- .../metadata/kafka/hook/HookUtils.java | 33 ++ .../hook/incident/IncidentsSummaryHook.java | 269 +++++++++++++++ .../incident/IncidentsSummaryHookTest.java | 309 ++++++++++++++++++ .../kafka/hook/spring/MCLSpringTest.java | 6 + .../service/IncidentsSummaryUtils.java | 93 ++++++ 8 files changed, 1023 insertions(+), 1 deletion(-) create mode 100644 metadata-io/src/test/java/com/linkedin/metadata/service/IncidentsSummaryUtilsTest.java create mode 100644 metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/HookUtils.java create mode 100644 
metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/incident/IncidentsSummaryHook.java create mode 100644 metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/incident/IncidentsSummaryHookTest.java create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/service/IncidentsSummaryUtils.java diff --git a/metadata-io/src/test/java/com/linkedin/metadata/service/IncidentsSummaryUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/service/IncidentsSummaryUtilsTest.java new file mode 100644 index 0000000000000..d85d1f32ac58b --- /dev/null +++ b/metadata-io/src/test/java/com/linkedin/metadata/service/IncidentsSummaryUtilsTest.java @@ -0,0 +1,309 @@ +package com.linkedin.metadata.service; + +import com.google.common.collect.ImmutableList; +import com.linkedin.common.IncidentSummaryDetails; +import com.linkedin.common.IncidentSummaryDetailsArray; +import com.linkedin.common.IncidentsSummary; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.SetMode; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class IncidentsSummaryUtilsTest { + + private static final Urn TEST_INCIDENT_URN = UrnUtils.getUrn("urn:li:incident:test"); + private static final Urn TEST_INCIDENT_URN_2 = UrnUtils.getUrn("urn:li:incident:test-2"); + private static final String TEST_INCIDENT_TYPE = "testType"; + + @Test + public void testRemoveIncidentFromResolvedSummaryLegacy() { + // Case 1: Has the incident in resolved. 
+ IncidentsSummary summary = + mockIncidentsSummaryLegacy(ImmutableList.of(TEST_INCIDENT_URN), Collections.emptyList()); + IncidentsSummaryUtils.removeIncidentFromResolvedSummary(TEST_INCIDENT_URN, summary); + Assert.assertEquals( + summary, mockIncidentsSummaryLegacy(Collections.emptyList(), Collections.emptyList())); + + // Case 2: Has the incident in active. + summary = + mockIncidentsSummaryLegacy(Collections.emptyList(), ImmutableList.of(TEST_INCIDENT_URN)); + IncidentsSummaryUtils.removeIncidentFromResolvedSummary(TEST_INCIDENT_URN, summary); + Assert.assertEquals( + summary, + mockIncidentsSummaryLegacy(Collections.emptyList(), ImmutableList.of(TEST_INCIDENT_URN))); + + // Case 3: Does not have the incident at all. + summary = mockIncidentsSummaryLegacy(Collections.emptyList(), Collections.emptyList()); + IncidentsSummaryUtils.removeIncidentFromResolvedSummary(TEST_INCIDENT_URN, summary); + Assert.assertEquals( + summary, mockIncidentsSummaryLegacy(Collections.emptyList(), Collections.emptyList())); + + // Case 4: Has 2 items in list. + summary = + mockIncidentsSummaryLegacy( + ImmutableList.of(TEST_INCIDENT_URN, TEST_INCIDENT_URN_2), Collections.emptyList()); + IncidentsSummaryUtils.removeIncidentFromResolvedSummary(TEST_INCIDENT_URN, summary); + Assert.assertEquals( + summary, + mockIncidentsSummaryLegacy(ImmutableList.of(TEST_INCIDENT_URN_2), Collections.emptyList())); + } + + @Test + public void testRemoveIncidentFromActiveSummaryLegacy() { + // Case 1: Has the incident in active. + IncidentsSummary summary = + mockIncidentsSummaryLegacy(Collections.emptyList(), ImmutableList.of(TEST_INCIDENT_URN)); + IncidentsSummaryUtils.removeIncidentFromActiveSummary(TEST_INCIDENT_URN, summary); + Assert.assertEquals( + summary, mockIncidentsSummaryLegacy(Collections.emptyList(), Collections.emptyList())); + + // Case 2: Has the incident in resolved. 
+ summary = + mockIncidentsSummaryLegacy(ImmutableList.of(TEST_INCIDENT_URN), Collections.emptyList()); + IncidentsSummaryUtils.removeIncidentFromActiveSummary(TEST_INCIDENT_URN, summary); + Assert.assertEquals( + summary, + mockIncidentsSummaryLegacy(ImmutableList.of(TEST_INCIDENT_URN), Collections.emptyList())); + + // Case 3: Does not have the incident at all. + summary = mockIncidentsSummaryLegacy(Collections.emptyList(), Collections.emptyList()); + IncidentsSummaryUtils.removeIncidentFromActiveSummary(TEST_INCIDENT_URN, summary); + Assert.assertEquals( + summary, mockIncidentsSummaryLegacy(Collections.emptyList(), Collections.emptyList())); + + // Case 4: Has 2 items in list. + summary = + mockIncidentsSummaryLegacy( + Collections.emptyList(), ImmutableList.of(TEST_INCIDENT_URN, TEST_INCIDENT_URN_2)); + IncidentsSummaryUtils.removeIncidentFromActiveSummary(TEST_INCIDENT_URN, summary); + Assert.assertEquals( + summary, + mockIncidentsSummaryLegacy(Collections.emptyList(), ImmutableList.of(TEST_INCIDENT_URN_2))); + } + + @Test + public void testRemoveIncidentFromResolvedSummary() { + // Case 1: Has the incident in resolved details. + IncidentsSummary summary = + mockIncidentsSummary( + ImmutableList.of(buildIncidentDetails(TEST_INCIDENT_URN, TEST_INCIDENT_TYPE)), + Collections.emptyList()); + IncidentsSummaryUtils.removeIncidentFromResolvedSummary(TEST_INCIDENT_URN, summary); + Assert.assertEquals( + summary, mockIncidentsSummary(Collections.emptyList(), Collections.emptyList())); + + // Case 2: Has the incident in active. 
+ summary = + mockIncidentsSummary( + Collections.emptyList(), + ImmutableList.of(buildIncidentDetails(TEST_INCIDENT_URN, TEST_INCIDENT_TYPE))); + IncidentsSummaryUtils.removeIncidentFromResolvedSummary(TEST_INCIDENT_URN, summary); + Assert.assertEquals( + summary, + mockIncidentsSummary( + Collections.emptyList(), + ImmutableList.of(buildIncidentDetails(TEST_INCIDENT_URN, TEST_INCIDENT_TYPE)))); + + // Case 3: Does not have the incident at all. + summary = mockIncidentsSummary(Collections.emptyList(), Collections.emptyList()); + IncidentsSummaryUtils.removeIncidentFromResolvedSummary(TEST_INCIDENT_URN, summary); + Assert.assertEquals( + summary, mockIncidentsSummary(Collections.emptyList(), Collections.emptyList())); + + // Case 4: Has 2 items in list. + summary = + mockIncidentsSummary( + ImmutableList.of( + buildIncidentDetails(TEST_INCIDENT_URN, TEST_INCIDENT_TYPE), + buildIncidentDetails(TEST_INCIDENT_URN_2, TEST_INCIDENT_TYPE)), + Collections.emptyList()); + IncidentsSummaryUtils.removeIncidentFromResolvedSummary(TEST_INCIDENT_URN, summary); + Assert.assertEquals( + summary, + mockIncidentsSummary( + ImmutableList.of(buildIncidentDetails(TEST_INCIDENT_URN_2, TEST_INCIDENT_TYPE)), + Collections.emptyList())); + } + + @Test + public void testRemoveIncidentFromActiveSummary() { + // Case 1: Has the incident in active. + IncidentsSummary summary = + mockIncidentsSummary( + Collections.emptyList(), + ImmutableList.of(buildIncidentDetails(TEST_INCIDENT_URN, TEST_INCIDENT_TYPE))); + IncidentsSummaryUtils.removeIncidentFromActiveSummary(TEST_INCIDENT_URN, summary); + Assert.assertEquals( + summary, mockIncidentsSummary(Collections.emptyList(), Collections.emptyList())); + + // Case 2: Has the incident in resolved. 
+ summary = + mockIncidentsSummary( + ImmutableList.of(buildIncidentDetails(TEST_INCIDENT_URN, TEST_INCIDENT_TYPE)), + Collections.emptyList()); + IncidentsSummaryUtils.removeIncidentFromActiveSummary(TEST_INCIDENT_URN, summary); + Assert.assertEquals( + summary, + mockIncidentsSummary( + ImmutableList.of(buildIncidentDetails(TEST_INCIDENT_URN, TEST_INCIDENT_TYPE)), + Collections.emptyList())); + + // Case 3: Does not have the incident at all. + summary = mockIncidentsSummary(Collections.emptyList(), Collections.emptyList()); + IncidentsSummaryUtils.removeIncidentFromActiveSummary(TEST_INCIDENT_URN, summary); + Assert.assertEquals( + summary, mockIncidentsSummary(Collections.emptyList(), Collections.emptyList())); + + // Case 4: Has 2 items in list. + summary = + mockIncidentsSummary( + Collections.emptyList(), + ImmutableList.of( + buildIncidentDetails(TEST_INCIDENT_URN, TEST_INCIDENT_TYPE), + buildIncidentDetails(TEST_INCIDENT_URN_2, TEST_INCIDENT_TYPE))); + IncidentsSummaryUtils.removeIncidentFromActiveSummary(TEST_INCIDENT_URN, summary); + Assert.assertEquals( + summary, + mockIncidentsSummary( + Collections.emptyList(), + ImmutableList.of(buildIncidentDetails(TEST_INCIDENT_URN_2, TEST_INCIDENT_TYPE)))); + } + + @Test + public void testAddIncidentToActiveSummary() { + // Case 1: Has an incident in active. 
+ IncidentSummaryDetails existingDetails = + buildIncidentDetails(TEST_INCIDENT_URN, TEST_INCIDENT_TYPE); + IncidentsSummary summary = + mockIncidentsSummary(Collections.emptyList(), ImmutableList.of(existingDetails)); + IncidentSummaryDetails newDetails = + buildIncidentDetails(TEST_INCIDENT_URN_2, TEST_INCIDENT_TYPE); + IncidentsSummaryUtils.addIncidentToActiveSummary(newDetails, summary, 100); + IncidentsSummary expected = + mockIncidentsSummary( + Collections.emptyList(), ImmutableList.of(existingDetails, newDetails)); + Assert.assertEquals( + new HashSet<>(summary.getActiveIncidentDetails()), + new HashSet<>(expected.getActiveIncidentDetails())); // Set comparison + Assert.assertEquals( + new HashSet<>(summary.getResolvedIncidentDetails()), + new HashSet<>(expected.getResolvedIncidentDetails())); // Set comparison + + // Case 2: Has an incident in resolved. + summary = mockIncidentsSummary(ImmutableList.of(existingDetails), Collections.emptyList()); + IncidentsSummaryUtils.addIncidentToActiveSummary(newDetails, summary, 100); + Assert.assertEquals( + summary, + mockIncidentsSummary(ImmutableList.of(existingDetails), ImmutableList.of(newDetails))); + + // Case 3: Does not have any incidents yet + summary = mockIncidentsSummary(Collections.emptyList(), Collections.emptyList()); + IncidentsSummaryUtils.addIncidentToActiveSummary(newDetails, summary, 100); + Assert.assertEquals( + summary, mockIncidentsSummary(Collections.emptyList(), ImmutableList.of(newDetails))); + + // Case 4: Duplicate additions - already has the same incident in the list + summary = mockIncidentsSummary(Collections.emptyList(), ImmutableList.of(existingDetails)); + newDetails = buildIncidentDetails(TEST_INCIDENT_URN, "type2"); + IncidentsSummaryUtils.addIncidentToActiveSummary(newDetails, summary, 100); + Assert.assertEquals( + summary, mockIncidentsSummary(Collections.emptyList(), ImmutableList.of(newDetails))); + + // Test out max size, removes old and adds in new + summary = 
mockIncidentsSummary(Collections.emptyList(), ImmutableList.of(existingDetails)); + newDetails = buildIncidentDetails(TEST_INCIDENT_URN_2, TEST_INCIDENT_TYPE); + IncidentsSummaryUtils.addIncidentToActiveSummary(newDetails, summary, 1); + Assert.assertEquals( + summary, mockIncidentsSummary(Collections.emptyList(), ImmutableList.of(newDetails))); + Assert.assertEquals(summary.getActiveIncidentDetails().size(), 1); + } + + @Test + public void testAddIncidentToResolvedSummary() { + // Case 1: Has an incident in resolved. + IncidentSummaryDetails existingDetails = + buildIncidentDetails(TEST_INCIDENT_URN, TEST_INCIDENT_TYPE); + IncidentsSummary summary = + mockIncidentsSummary(ImmutableList.of(existingDetails), Collections.emptyList()); + IncidentSummaryDetails newDetails = + buildIncidentDetails(TEST_INCIDENT_URN_2, TEST_INCIDENT_TYPE); + IncidentsSummaryUtils.addIncidentToResolvedSummary(newDetails, summary, 100); + IncidentsSummary expected = + mockIncidentsSummary( + ImmutableList.of(existingDetails, newDetails), Collections.emptyList()); + Assert.assertEquals( + new HashSet<>(summary.getActiveIncidentDetails()), + new HashSet<>(expected.getActiveIncidentDetails())); // Set comparison + Assert.assertEquals( + new HashSet<>(summary.getResolvedIncidentDetails()), + new HashSet<>(expected.getResolvedIncidentDetails())); // Set comparison + + // Case 2: Has an incident in active. 
+ summary = mockIncidentsSummary(Collections.emptyList(), ImmutableList.of(existingDetails)); + IncidentsSummaryUtils.addIncidentToResolvedSummary(newDetails, summary, 100); + Assert.assertEquals( + summary, + mockIncidentsSummary(ImmutableList.of(newDetails), ImmutableList.of(existingDetails))); + + // Case 3: Does not have any incidents yet + summary = mockIncidentsSummary(Collections.emptyList(), Collections.emptyList()); + IncidentsSummaryUtils.addIncidentToResolvedSummary(newDetails, summary, 100); + Assert.assertEquals( + summary, mockIncidentsSummary(ImmutableList.of(newDetails), Collections.emptyList())); + + // Case 4: Duplicate additions - already has the same incident + summary = mockIncidentsSummary(ImmutableList.of(existingDetails), Collections.emptyList()); + newDetails = buildIncidentDetails(TEST_INCIDENT_URN, "type2"); + IncidentsSummaryUtils.addIncidentToResolvedSummary(newDetails, summary, 100); + Assert.assertEquals( + summary, mockIncidentsSummary(ImmutableList.of(newDetails), Collections.emptyList())); + + // Test out max size, removes old and adds in new + summary = mockIncidentsSummary(ImmutableList.of(existingDetails), Collections.emptyList()); + newDetails = buildIncidentDetails(TEST_INCIDENT_URN_2, TEST_INCIDENT_TYPE); + IncidentsSummaryUtils.addIncidentToResolvedSummary(newDetails, summary, 1); + Assert.assertEquals( + summary, mockIncidentsSummary(ImmutableList.of(newDetails), Collections.emptyList())); + Assert.assertEquals(summary.getResolvedIncidentDetails().size(), 1); + } + + private IncidentsSummary mockIncidentsSummaryLegacy( + final List resolvedIncidents, final List activeIncidents) { + return new IncidentsSummary() + .setResolvedIncidents(new UrnArray(resolvedIncidents)) + .setActiveIncidents(new UrnArray(activeIncidents)); + } + + private IncidentsSummary mockIncidentsSummary( + final List resolvedIncidents, + final List activeIncidents) { + return new IncidentsSummary() + .setResolvedIncidentDetails(new 
IncidentSummaryDetailsArray(resolvedIncidents)) + .setActiveIncidentDetails(new IncidentSummaryDetailsArray(activeIncidents)); + } + + private IncidentSummaryDetails buildIncidentDetails(Urn incidentUrn, String incidentType) { + return buildIncidentDetails(incidentUrn, incidentType, 1, 0L, 1L); + } + + private IncidentSummaryDetails buildIncidentDetails( + Urn incidentUrn, String incidentType, Integer priority) { + return buildIncidentDetails(incidentUrn, incidentType, priority, 0L, 1L); + } + + private IncidentSummaryDetails buildIncidentDetails( + Urn incidentUrn, String incidentType, Integer priority, Long createdAt, Long resolvedAt) { + IncidentSummaryDetails details = new IncidentSummaryDetails(); + details.setUrn(incidentUrn); + details.setType(incidentType); + details.setPriority(priority, SetMode.IGNORE_NULL); + details.setCreatedAt(createdAt, SetMode.IGNORE_NULL); + details.setResolvedAt(resolvedAt, SetMode.IGNORE_NULL); + return details; + } +} diff --git a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java index ae208c053d69f..018646fad07fc 100644 --- a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java +++ b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java @@ -22,6 +22,7 @@ "com.linkedin.gms.factory.entity.update.indices", "com.linkedin.gms.factory.entityclient", "com.linkedin.gms.factory.form", + "com.linkedin.gms.factory.incident", "com.linkedin.gms.factory.timeline.eventgenerator", "io.datahubproject.metadata.jobs.common.health.kafka" }, diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java index 278c52030b5fc..8496c06db86a7 100644 --- 
a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java @@ -10,6 +10,7 @@ import com.linkedin.metadata.kafka.hook.UpdateIndicesHook; import com.linkedin.metadata.kafka.hook.event.EntityChangeEventGeneratorHook; import com.linkedin.metadata.kafka.hook.form.FormAssignmentHook; +import com.linkedin.metadata.kafka.hook.incident.IncidentsSummaryHook; import com.linkedin.metadata.kafka.hook.ingestion.IngestionSchedulerHook; import com.linkedin.metadata.kafka.hook.siblings.SiblingAssociationHook; import com.linkedin.metadata.utils.metrics.MetricUtils; @@ -38,7 +39,8 @@ EntityChangeEventGeneratorHook.class, KafkaEventConsumerFactory.class, SiblingAssociationHook.class, - FormAssignmentHook.class + FormAssignmentHook.class, + IncidentsSummaryHook.class, }) @EnableKafka public class MetadataChangeLogProcessor { diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/HookUtils.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/HookUtils.java new file mode 100644 index 0000000000000..4cae074aa9e5e --- /dev/null +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/HookUtils.java @@ -0,0 +1,33 @@ +package com.linkedin.metadata.kafka.hook; + +import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.mxe.MetadataChangeLog; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class HookUtils { + + /** + * Extracts and returns an {@link Urn} from a {@link MetadataChangeLog}. Extracts from either an + * entityUrn or entityKey field, depending on which is present. 
+ */ + public static Urn getUrnFromEvent( + @Nonnull final MetadataChangeLog event, @Nonnull final EntityRegistry entityRegistry) { + EntitySpec entitySpec; + try { + entitySpec = entityRegistry.getEntitySpec(event.getEntityType()); + } catch (IllegalArgumentException e) { + log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString()); + throw new RuntimeException( + "Failed to get urn from MetadataChangeLog event. Skipping processing.", e); + } + // Extract an URN from the Log Event. + return EntityKeyUtils.getUrnFromLog(event, entitySpec.getKeyAspectSpec()); + } + + private HookUtils() {} +} diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/incident/IncidentsSummaryHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/incident/IncidentsSummaryHook.java new file mode 100644 index 0000000000000..6cbaff224210b --- /dev/null +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/incident/IncidentsSummaryHook.java @@ -0,0 +1,269 @@ +package com.linkedin.metadata.kafka.hook.incident; + +import static com.linkedin.metadata.Constants.*; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.IncidentSummaryDetails; +import com.linkedin.common.IncidentSummaryDetailsArray; +import com.linkedin.common.IncidentsSummary; +import com.linkedin.common.Status; +import com.linkedin.common.urn.Urn; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.gms.factory.auth.SystemAuthenticationFactory; +import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; +import com.linkedin.gms.factory.incident.IncidentServiceFactory; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.incident.IncidentState; +import com.linkedin.incident.IncidentStatus; +import com.linkedin.incident.IncidentType; +import com.linkedin.metadata.kafka.hook.HookUtils; +import com.linkedin.metadata.kafka.hook.MetadataChangeLogHook; 
+import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.service.IncidentService; +import com.linkedin.metadata.service.IncidentsSummaryUtils; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.MetadataChangeLog; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import javax.annotation.Nonnull; +import javax.inject.Singleton; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Import; +import org.springframework.stereotype.Component; + +/** + * This hook is responsible for maintaining the IncidentsSummary.pdl aspect of entities on which + * Incidents may be raised. It handles both incident updates and incident soft deletions to ensure + * that this aspect reflects the latest state of the incident. + * + *

Hard deletes of incidents are not handled within this hook because the expectation is that + * deleteReferences will be invoked to clean up references. + */ +@Slf4j +@Component +@Singleton +@Import({ + EntityRegistryFactory.class, + IncidentServiceFactory.class, + SystemAuthenticationFactory.class +}) +public class IncidentsSummaryHook implements MetadataChangeLogHook { + + private static final Set SUPPORTED_UPDATE_TYPES = + ImmutableSet.of(ChangeType.UPSERT, ChangeType.CREATE, ChangeType.RESTATE); + private static final Set SUPPORTED_UPDATE_ASPECTS = + ImmutableSet.of(INCIDENT_INFO_ASPECT_NAME, STATUS_ASPECT_NAME); + + private final EntityRegistry _entityRegistry; + private final IncidentService _incidentService; + private final boolean _isEnabled; + + /** Max number of incidents to allow in incident summary, limited to prevent HTTP errors */ + private final int _maxIncidentHistory; + + @Autowired + public IncidentsSummaryHook( + @Nonnull final EntityRegistry entityRegistry, + @Nonnull final IncidentService incidentService, + @Nonnull @Value("${incidents.hook.enabled:true}") Boolean isEnabled, + @Nonnull @Value("${incidents.hook.maxIncidentHistory:100}") Integer maxIncidentHistory) { + _entityRegistry = Objects.requireNonNull(entityRegistry, "entityRegistry is required"); + _incidentService = Objects.requireNonNull(incidentService, "incidentService is required"); + _isEnabled = isEnabled; + _maxIncidentHistory = maxIncidentHistory; + } + + @Override + public void init() {} + + @Override + public boolean isEnabled() { + return _isEnabled; + } + + @Override + public void invoke(@Nonnull final MetadataChangeLog event) { + if (_isEnabled && isEligibleForProcessing(event)) { + log.debug("Urn {} received by Incident Summary Hook.", event.getEntityUrn()); + final Urn urn = HookUtils.getUrnFromEvent(event, _entityRegistry); + // Handle the deletion case. 
+ if (isIncidentSoftDeleted(event)) {
+ handleIncidentSoftDeleted(urn);
+ } else if (isIncidentUpdate(event)) {
+ handleIncidentUpdated(urn);
+ }
+ }
+ }
+
+ /**
+ * Handles an incident deletion by removing the incident from either resolved or active incidents.
+ */
+ private void handleIncidentSoftDeleted(@Nonnull final Urn incidentUrn) {
+ // 1. Fetch incident info.
+ IncidentInfo incidentInfo = _incidentService.getIncidentInfo(incidentUrn);
+
+ // 2. Retrieve associated urns.
+ if (incidentInfo != null) {
+ final List incidentEntities = incidentInfo.getEntities();
+
+ // 3. For each urn, resolve the entity incidents aspect and remove from active and resolved
+ // incidents.
+ for (Urn entityUrn : incidentEntities) {
+ removeIncidentFromSummary(incidentUrn, entityUrn);
+ }
+ } else {
+ log.warn(
+ String.format(
+ "Failed to find incidentInfo aspect for incident with urn %s. Skipping updating incident summary for related incidents!",
+ incidentUrn));
+ }
+ }
+
+ /** Handle an incident update by adding to either resolved or active incidents for an entity. */
+ private void handleIncidentUpdated(@Nonnull final Urn incidentUrn) {
+ // 1. Fetch incident info + status
+ IncidentInfo incidentInfo = _incidentService.getIncidentInfo(incidentUrn);
+
+ // 2. Retrieve associated urns.
+ if (incidentInfo != null) {
+ final List incidentEntities = incidentInfo.getEntities();
+
+ // 3. For each urn, resolve the entity incidents aspect and add to active or resolved
+ // incidents.
+ for (Urn entityUrn : incidentEntities) {
+ addIncidentToSummary(incidentUrn, entityUrn, incidentInfo);
+ }
+ } else {
+ log.warn(
+ String.format(
+ "Failed to find incidentInfo aspect for incident with urn %s. Skipping updating incident summary for related incidents!",
+ incidentUrn));
+ }
+ }
+
+ /** Removes an incident from the IncidentSummary aspect for a related entity. */
+ private void removeIncidentFromSummary(
+ @Nonnull final Urn incidentUrn, @Nonnull final Urn entityUrn) {
+ // 1. 
Fetch the latest incident summary for the entity + IncidentsSummary summary = getIncidentsSummary(entityUrn); + + // 2. Remove the incident from active and resolved incidents + IncidentsSummaryUtils.removeIncidentFromResolvedSummary(incidentUrn, summary); + IncidentsSummaryUtils.removeIncidentFromActiveSummary(incidentUrn, summary); + + // 3. Emit the change back! + updateIncidentSummary(entityUrn, summary); + } + + /** + * Adds an incident to the IncidentSummary aspect for a related entity. This is used to search for + * entity by active and resolved incidents. + */ + private void addIncidentToSummary( + @Nonnull final Urn incidentUrn, + @Nonnull final Urn entityUrn, + @Nonnull final IncidentInfo info) { + // 1. Fetch the latest incident summary for the entity + IncidentsSummary summary = getIncidentsSummary(entityUrn); + IncidentStatus status = info.getStatus(); + IncidentSummaryDetails details = buildIncidentSummaryDetails(incidentUrn, info); + + // 2. Add the incident to active or resolved incidents + if (IncidentState.ACTIVE.equals(status.getState())) { + // First, ensure this isn't in any summaries anymore. + IncidentsSummaryUtils.removeIncidentFromResolvedSummary(incidentUrn, summary); + + // Then, add to active. + IncidentsSummaryUtils.addIncidentToActiveSummary(details, summary, _maxIncidentHistory); + + } else if (IncidentState.RESOLVED.equals(status.getState())) { + // First, ensure this isn't in any summaries anymore. + IncidentsSummaryUtils.removeIncidentFromActiveSummary(incidentUrn, summary); + + // Then, add to resolved. + IncidentsSummaryUtils.addIncidentToResolvedSummary(details, summary, _maxIncidentHistory); + } + + // 3. Emit the change back! + updateIncidentSummary(entityUrn, summary); + } + + @Nonnull + private IncidentsSummary getIncidentsSummary(@Nonnull final Urn entityUrn) { + IncidentsSummary maybeIncidentsSummary = _incidentService.getIncidentsSummary(entityUrn); + return maybeIncidentsSummary == null + ? 
new IncidentsSummary() + .setResolvedIncidentDetails(new IncidentSummaryDetailsArray()) + .setActiveIncidentDetails(new IncidentSummaryDetailsArray()) + : maybeIncidentsSummary; + } + + @Nonnull + private IncidentSummaryDetails buildIncidentSummaryDetails( + @Nonnull final Urn urn, @Nonnull final IncidentInfo info) { + IncidentSummaryDetails incidentSummaryDetails = new IncidentSummaryDetails(); + incidentSummaryDetails.setUrn(urn); + incidentSummaryDetails.setCreatedAt(info.getCreated().getTime()); + if (IncidentType.CUSTOM.equals(info.getType())) { + incidentSummaryDetails.setType(info.getCustomType()); + } else { + incidentSummaryDetails.setType(info.getType().toString()); + } + if (info.hasPriority()) { + incidentSummaryDetails.setPriority(info.getPriority()); + } + if (IncidentState.RESOLVED.equals(info.getStatus().getState())) { + incidentSummaryDetails.setResolvedAt(info.getStatus().getLastUpdated().getTime()); + } + return incidentSummaryDetails; + } + + /** + * Returns true if the event should be processed, which is only true if the change is on the + * incident status aspect + */ + private boolean isEligibleForProcessing(@Nonnull final MetadataChangeLog event) { + return isIncidentSoftDeleted(event) || isIncidentUpdate(event); + } + + /** Returns true if an incident is being soft-deleted. 
*/
+ private boolean isIncidentSoftDeleted(@Nonnull final MetadataChangeLog event) {
+ return INCIDENT_ENTITY_NAME.equals(event.getEntityType())
+ && SUPPORTED_UPDATE_TYPES.contains(event.getChangeType())
+ && isSoftDeletionEvent(event);
+ }
+
+ private boolean isSoftDeletionEvent(@Nonnull final MetadataChangeLog event) {
+ if (STATUS_ASPECT_NAME.equals(event.getAspectName()) && event.getAspect() != null) {
+ final Status status =
+ GenericRecordUtils.deserializeAspect(
+ event.getAspect().getValue(), event.getAspect().getContentType(), Status.class);
+ return status.hasRemoved() && status.isRemoved();
+ }
+ return false;
+ }
+
+ /** Returns true if the event represents an incident update event. */
+ private boolean isIncidentUpdate(@Nonnull final MetadataChangeLog event) {
+ return INCIDENT_ENTITY_NAME.equals(event.getEntityType())
+ && SUPPORTED_UPDATE_TYPES.contains(event.getChangeType())
+ && SUPPORTED_UPDATE_ASPECTS.contains(event.getAspectName());
+ }
+
+ /** Updates the incidents summary for a given entity */
+ private void updateIncidentSummary(
+ @Nonnull final Urn entityUrn, @Nonnull final IncidentsSummary newSummary) {
+ try {
+ _incidentService.updateIncidentsSummary(entityUrn, newSummary);
+ } catch (Exception e) {
+ log.error(
+ String.format(
+ "Failed to updated incidents summary for entity with urn %s! 
Skipping updating the summary", + entityUrn), + e); + } + } +} diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/incident/IncidentsSummaryHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/incident/IncidentsSummaryHookTest.java new file mode 100644 index 0000000000000..d45612cf1ef83 --- /dev/null +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/incident/IncidentsSummaryHookTest.java @@ -0,0 +1,309 @@ +package com.linkedin.metadata.kafka.hook.incident; + +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.*; +import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY; + +import com.google.common.collect.ImmutableList; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.IncidentSummaryDetails; +import com.linkedin.common.IncidentSummaryDetailsArray; +import com.linkedin.common.IncidentsSummary; +import com.linkedin.common.Status; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.incident.IncidentSource; +import com.linkedin.incident.IncidentSourceType; +import com.linkedin.incident.IncidentState; +import com.linkedin.incident.IncidentStatus; +import com.linkedin.incident.IncidentType; +import com.linkedin.metadata.service.IncidentService; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.MetadataChangeLog; +import java.util.List; +import java.util.stream.Collectors; +import org.mockito.Mockito; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +public class IncidentsSummaryHookTest { + private static final Urn TEST_EXISTING_INCIDENT_URN = 
UrnUtils.getUrn("urn:li:incident:existing"); + private static final Urn TEST_INCIDENT_URN = UrnUtils.getUrn("urn:li:incident:test"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,name,PROD)"); + private static final Urn TEST_DATASET_2_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,name2,PROD)"); + private static final String TEST_INCIDENT_TYPE = "TestType"; + + @Test + public void testInvokeNotEnabled() throws Exception { + IncidentInfo incidentInfo = + mockIncidentInfo( + ImmutableList.of(TEST_DATASET_URN, TEST_DATASET_2_URN), IncidentState.ACTIVE); + IncidentService service = mockIncidentService(new IncidentsSummary(), incidentInfo); + IncidentsSummaryHook hook = new IncidentsSummaryHook(ENTITY_REGISTRY, service, false, 100); + final MetadataChangeLog event = + buildMetadataChangeLog( + TEST_INCIDENT_URN, INCIDENT_INFO_ASPECT_NAME, ChangeType.UPSERT, incidentInfo); + hook.invoke(event); + Mockito.verify(service, Mockito.times(0)).getIncidentInfo(Mockito.any()); + } + + @Test + public void testInvokeNotEligibleChange() throws Exception { + IncidentInfo info = + mockIncidentInfo( + ImmutableList.of(TEST_DATASET_URN, TEST_DATASET_2_URN), IncidentState.ACTIVE); + IncidentService service = mockIncidentService(new IncidentsSummary(), info); + IncidentsSummaryHook hook = new IncidentsSummaryHook(ENTITY_REGISTRY, service, true, 100); + + // Case 1: Incorrect aspect + MetadataChangeLog event = + buildMetadataChangeLog( + TEST_INCIDENT_URN, INCIDENT_KEY_ASPECT_NAME, ChangeType.UPSERT, new IncidentInfo()); + hook.invoke(event); + Mockito.verify(service, Mockito.times(0)).getIncidentInfo(Mockito.any()); + + // Case 2: Run Event But Delete + event = + buildMetadataChangeLog( + TEST_INCIDENT_URN, INCIDENT_INFO_ASPECT_NAME, ChangeType.DELETE, info); + hook.invoke(event); + Mockito.verify(service, Mockito.times(0)).getIncidentInfo(Mockito.any()); + } + + @DataProvider(name = 
"incidentsSummaryBaseProvider") + static Object[][] incidentsSummaryBaseProvider() { + return new Object[][] { + new Object[] {null}, + new Object[] { + new IncidentsSummary() + .setActiveIncidentDetails(new IncidentSummaryDetailsArray()) + .setResolvedIncidentDetails(new IncidentSummaryDetailsArray()) + }, + new Object[] { + new IncidentsSummary() + .setActiveIncidentDetails( + new IncidentSummaryDetailsArray( + ImmutableList.of( + new IncidentSummaryDetails() + .setUrn(TEST_EXISTING_INCIDENT_URN) + .setType(TEST_INCIDENT_TYPE) + .setCreatedAt(0L)))) + .setResolvedIncidentDetails(new IncidentSummaryDetailsArray()) + }, + new Object[] { + new IncidentsSummary() + .setActiveIncidentDetails(new IncidentSummaryDetailsArray()) + .setResolvedIncidentDetails( + new IncidentSummaryDetailsArray( + ImmutableList.of( + new IncidentSummaryDetails() + .setUrn(TEST_EXISTING_INCIDENT_URN) + .setType(TEST_INCIDENT_TYPE) + .setCreatedAt(0L)))) + }, + new Object[] { + new IncidentsSummary() + .setActiveIncidentDetails( + new IncidentSummaryDetailsArray( + ImmutableList.of( + new IncidentSummaryDetails() + .setUrn(TEST_INCIDENT_URN) + .setType(TEST_INCIDENT_TYPE) + .setCreatedAt(0L)))) + .setResolvedIncidentDetails( + new IncidentSummaryDetailsArray( + ImmutableList.of( + new IncidentSummaryDetails() + .setUrn(TEST_INCIDENT_URN) + .setType(TEST_INCIDENT_TYPE) + .setCreatedAt(0L)))) + } + }; + } + + @Test(dataProvider = "incidentsSummaryBaseProvider") + public void testInvokeIncidentRunEventActive(IncidentsSummary summary) throws Exception { + IncidentInfo info = + mockIncidentInfo( + ImmutableList.of(TEST_DATASET_URN, TEST_DATASET_2_URN), IncidentState.ACTIVE); + IncidentService service = mockIncidentService(summary, info); + IncidentsSummaryHook hook = new IncidentsSummaryHook(ENTITY_REGISTRY, service, true, 100); + final MetadataChangeLog event = + buildMetadataChangeLog( + TEST_INCIDENT_URN, INCIDENT_INFO_ASPECT_NAME, ChangeType.UPSERT, info); + hook.invoke(event); + 
Mockito.verify(service, Mockito.times(1)).getIncidentInfo(Mockito.eq(TEST_INCIDENT_URN)); + Mockito.verify(service, Mockito.times(1)).getIncidentsSummary(Mockito.eq(TEST_DATASET_URN)); + Mockito.verify(service, Mockito.times(1)).getIncidentsSummary(Mockito.eq(TEST_DATASET_2_URN)); + + if (summary == null) { + summary = new IncidentsSummary(); + } + IncidentsSummary expectedSummary = new IncidentsSummary(summary.data()); + expectedSummary.setActiveIncidentDetails( + new IncidentSummaryDetailsArray( + expectedSummary.getActiveIncidentDetails().stream() + .filter(details -> !details.getUrn().equals(TEST_INCIDENT_URN)) + .collect(Collectors.toList()))); + expectedSummary.setResolvedIncidentDetails( + new IncidentSummaryDetailsArray( + expectedSummary.getResolvedIncidentDetails().stream() + .filter(details -> !details.getUrn().equals(TEST_INCIDENT_URN)) + .collect(Collectors.toList()))); + expectedSummary + .getActiveIncidentDetails() + .add(buildIncidentSummaryDetails(TEST_INCIDENT_URN, info)); + + // Ensure we ingested a new aspect. 
+ Mockito.verify(service, Mockito.times(1)) + .updateIncidentsSummary(Mockito.eq(TEST_DATASET_URN), Mockito.eq(expectedSummary)); + Mockito.verify(service, Mockito.times(1)) + .updateIncidentsSummary(Mockito.eq(TEST_DATASET_2_URN), Mockito.eq(expectedSummary)); + } + + @Test(dataProvider = "incidentsSummaryBaseProvider") + public void testInvokeIncidentRunEventResolved(IncidentsSummary summary) throws Exception { + IncidentInfo info = + mockIncidentInfo( + ImmutableList.of(TEST_DATASET_URN, TEST_DATASET_2_URN), IncidentState.RESOLVED); + IncidentService service = mockIncidentService(summary, info); + IncidentsSummaryHook hook = new IncidentsSummaryHook(ENTITY_REGISTRY, service, true, 100); + final MetadataChangeLog event = + buildMetadataChangeLog( + TEST_INCIDENT_URN, INCIDENT_INFO_ASPECT_NAME, ChangeType.UPSERT, info); + hook.invoke(event); + Mockito.verify(service, Mockito.times(1)).getIncidentInfo(Mockito.eq(TEST_INCIDENT_URN)); + Mockito.verify(service, Mockito.times(1)).getIncidentsSummary(Mockito.eq(TEST_DATASET_URN)); + Mockito.verify(service, Mockito.times(1)).getIncidentsSummary(Mockito.eq(TEST_DATASET_2_URN)); + + if (summary == null) { + summary = new IncidentsSummary(); + } + IncidentsSummary expectedSummary = new IncidentsSummary(summary.data()); + expectedSummary.setActiveIncidentDetails( + new IncidentSummaryDetailsArray( + expectedSummary.getActiveIncidentDetails().stream() + .filter(details -> !details.getUrn().equals(TEST_INCIDENT_URN)) + .collect(Collectors.toList()))); + expectedSummary.setResolvedIncidentDetails( + new IncidentSummaryDetailsArray( + expectedSummary.getResolvedIncidentDetails().stream() + .filter(details -> !details.getUrn().equals(TEST_INCIDENT_URN)) + .collect(Collectors.toList()))); + expectedSummary + .getResolvedIncidentDetails() + .add(buildIncidentSummaryDetails(TEST_INCIDENT_URN, info)); + + // Ensure we ingested a new aspect. 
+ Mockito.verify(service, Mockito.times(1)) + .updateIncidentsSummary(Mockito.eq(TEST_DATASET_URN), Mockito.eq(expectedSummary)); + Mockito.verify(service, Mockito.times(1)) + .updateIncidentsSummary(Mockito.eq(TEST_DATASET_2_URN), Mockito.eq(expectedSummary)); + } + + @Test(dataProvider = "incidentsSummaryBaseProvider") + public void testInvokeIncidentSoftDeleted(IncidentsSummary summary) throws Exception { + IncidentInfo info = + mockIncidentInfo( + ImmutableList.of(TEST_DATASET_URN, TEST_DATASET_2_URN), IncidentState.RESOLVED); + IncidentService service = mockIncidentService(summary, info); + IncidentsSummaryHook hook = new IncidentsSummaryHook(ENTITY_REGISTRY, service, true, 100); + final MetadataChangeLog event = + buildMetadataChangeLog( + TEST_INCIDENT_URN, STATUS_ASPECT_NAME, ChangeType.UPSERT, mockIncidentSoftDeleted()); + hook.invoke(event); + + Mockito.verify(service, Mockito.times(1)).getIncidentInfo(Mockito.eq(TEST_INCIDENT_URN)); + Mockito.verify(service, Mockito.times(1)).getIncidentsSummary(Mockito.eq(TEST_DATASET_URN)); + Mockito.verify(service, Mockito.times(1)).getIncidentsSummary(Mockito.eq(TEST_DATASET_2_URN)); + + if (summary == null) { + summary = new IncidentsSummary(); + } + IncidentsSummary expectedSummary = new IncidentsSummary(summary.data()); + expectedSummary.setResolvedIncidentDetails( + new IncidentSummaryDetailsArray( + expectedSummary.getResolvedIncidentDetails().stream() + .filter(details -> !details.getUrn().equals(TEST_INCIDENT_URN)) + .collect(Collectors.toList()))); + expectedSummary.setActiveIncidentDetails( + new IncidentSummaryDetailsArray( + expectedSummary.getActiveIncidentDetails().stream() + .filter(details -> !details.getUrn().equals(TEST_INCIDENT_URN)) + .collect(Collectors.toList()))); + + // Ensure we ingested a new aspect. 
+ Mockito.verify(service, Mockito.times(1)) + .updateIncidentsSummary(Mockito.eq(TEST_DATASET_URN), Mockito.eq(expectedSummary)); + Mockito.verify(service, Mockito.times(1)) + .updateIncidentsSummary(Mockito.eq(TEST_DATASET_2_URN), Mockito.eq(expectedSummary)); + } + + private IncidentInfo mockIncidentInfo(final List entityUrns, final IncidentState state) { + IncidentInfo event = new IncidentInfo(); + event.setEntities(new UrnArray(entityUrns)); + event.setType(IncidentType.OPERATIONAL); + event.setSource(new IncidentSource().setType(IncidentSourceType.MANUAL)); + event.setStatus( + new IncidentStatus() + .setState(state) + .setLastUpdated( + new AuditStamp().setTime(1L).setActor(UrnUtils.getUrn("urn:li:corpuser:test")))); + event.setCreated( + new AuditStamp().setTime(0L).setActor(UrnUtils.getUrn("urn:li:corpuser:test"))); + return event; + } + + private Status mockIncidentSoftDeleted() { + Status status = new Status(); + status.setRemoved(true); + return status; + } + + private IncidentService mockIncidentService(IncidentsSummary summary, IncidentInfo info) { + IncidentService mockService = Mockito.mock(IncidentService.class); + + Mockito.when(mockService.getIncidentInfo(TEST_INCIDENT_URN)).thenReturn(info); + + Mockito.when(mockService.getIncidentsSummary(TEST_DATASET_URN)).thenReturn(summary); + Mockito.when(mockService.getIncidentsSummary(TEST_DATASET_2_URN)).thenReturn(summary); + + return mockService; + } + + private IncidentSummaryDetails buildIncidentSummaryDetails( + final Urn incidentUrn, final IncidentInfo info) { + IncidentSummaryDetails incidentSummaryDetails = new IncidentSummaryDetails(); + incidentSummaryDetails.setUrn(incidentUrn); + incidentSummaryDetails.setCreatedAt(info.getCreated().getTime()); + if (IncidentType.CUSTOM.equals(info.getType())) { + incidentSummaryDetails.setType(info.getCustomType()); + } else { + incidentSummaryDetails.setType(info.getType().toString()); + } + if (info.hasPriority()) { + 
incidentSummaryDetails.setPriority(info.getPriority()); + } + if (IncidentState.RESOLVED.equals(info.getStatus().getState())) { + incidentSummaryDetails.setResolvedAt(info.getStatus().getLastUpdated().getTime()); + } + return incidentSummaryDetails; + } + + private MetadataChangeLog buildMetadataChangeLog( + Urn urn, String aspectName, ChangeType changeType, RecordTemplate aspect) throws Exception { + MetadataChangeLog event = new MetadataChangeLog(); + event.setEntityUrn(urn); + event.setEntityType(INCIDENT_ENTITY_NAME); + event.setAspectName(aspectName); + event.setChangeType(changeType); + event.setAspect(GenericRecordUtils.serializeAspect(aspect)); + return event; + } +} diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTest.java index 6d1bdca9c116f..130dc024ad528 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTest.java @@ -6,6 +6,7 @@ import com.linkedin.metadata.kafka.MetadataChangeLogProcessor; import com.linkedin.metadata.kafka.hook.UpdateIndicesHook; import com.linkedin.metadata.kafka.hook.event.EntityChangeEventGeneratorHook; +import com.linkedin.metadata.kafka.hook.incident.IncidentsSummaryHook; import com.linkedin.metadata.kafka.hook.ingestion.IngestionSchedulerHook; import com.linkedin.metadata.kafka.hook.siblings.SiblingAssociationHook; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; @@ -44,5 +45,10 @@ public void testHooks() { assertTrue( metadataChangeLogProcessor.getHooks().stream() .anyMatch(hook -> hook instanceof EntityChangeEventGeneratorHook)); + assertEquals( + 1, + metadataChangeLogProcessor.getHooks().stream() + .filter(hook -> hook instanceof IncidentsSummaryHook) + .count()); } } diff --git 
a/metadata-service/services/src/main/java/com/linkedin/metadata/service/IncidentsSummaryUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/IncidentsSummaryUtils.java new file mode 100644 index 0000000000000..2c580ba36b22a --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/IncidentsSummaryUtils.java @@ -0,0 +1,93 @@ +package com.linkedin.metadata.service; + +import com.linkedin.common.IncidentSummaryDetails; +import com.linkedin.common.IncidentSummaryDetailsArray; +import com.linkedin.common.IncidentsSummary; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class IncidentsSummaryUtils { + + public static void removeIncidentFromResolvedSummary( + @Nonnull final Urn incidentUrn, @Nonnull final IncidentsSummary summary) { + // Legacy - remove from deprecated field. + if (summary.hasResolvedIncidents()) { + final Set resolvedIncidents = new HashSet<>(summary.getResolvedIncidents()); + resolvedIncidents.remove(incidentUrn); + summary.setResolvedIncidents(new UrnArray(new ArrayList<>(resolvedIncidents))); + } + // New - remove from new field. + if (summary.hasResolvedIncidentDetails()) { + final Set filteredDetails = + summary.getResolvedIncidentDetails().stream() + .filter(details -> !incidentUrn.equals(details.getUrn())) + .collect(Collectors.toSet()); + summary.setResolvedIncidentDetails( + new IncidentSummaryDetailsArray(new ArrayList<>(filteredDetails))); + } + } + + public static void removeIncidentFromActiveSummary( + @Nonnull final Urn incidentUrn, @Nonnull final IncidentsSummary summary) { + // Legacy - remove the deprecated field. 
+ if (summary.hasActiveIncidents()) { + final Set activeIncidents = new HashSet<>(summary.getActiveIncidents()); + activeIncidents.remove(incidentUrn); + summary.setActiveIncidents(new UrnArray(new ArrayList<>(activeIncidents))); + } + // New - remove from the new field. + if (summary.hasActiveIncidentDetails()) { + final Set filteredDetails = + summary.getActiveIncidentDetails().stream() + .filter(details -> !incidentUrn.equals(details.getUrn())) + .collect(Collectors.toSet()); + summary.setActiveIncidentDetails( + new IncidentSummaryDetailsArray(new ArrayList<>(filteredDetails))); + } + } + + public static void addIncidentToResolvedSummary( + @Nonnull final IncidentSummaryDetails details, + @Nonnull final IncidentsSummary summary, + int maxIncidentHistory) { + final List existingDetails = summary.getResolvedIncidentDetails(); + final List newDetails = + existingDetails.stream() + .filter(existing -> !details.getUrn().equals(existing.getUrn())) + .sorted(Comparator.comparing(IncidentSummaryDetails::getCreatedAt)) + .collect(Collectors.toList()); + while (newDetails.size() >= maxIncidentHistory && !newDetails.isEmpty()) { + // Removes oldest entry until size is less than max size + newDetails.remove(0); + } + newDetails.add(details); + summary.setResolvedIncidentDetails(new IncidentSummaryDetailsArray(newDetails)); + } + + public static void addIncidentToActiveSummary( + @Nonnull final IncidentSummaryDetails details, + @Nonnull final IncidentsSummary summary, + int maxIncidentHistory) { + final List existingDetails = summary.getActiveIncidentDetails(); + final List newDetails = + existingDetails.stream() + .filter(existing -> !details.getUrn().equals(existing.getUrn())) + .sorted(Comparator.comparing(IncidentSummaryDetails::getCreatedAt)) + .collect(Collectors.toList()); + while (newDetails.size() >= maxIncidentHistory && !newDetails.isEmpty()) { + // Removes oldest entry until size is less than max size + newDetails.remove(0); + } + newDetails.add(details); + 
summary.setActiveIncidentDetails(new IncidentSummaryDetailsArray(newDetails)); + } + + private IncidentsSummaryUtils() {} +} From 814863aebd4d157e732498ff3a801c868840fcc2 Mon Sep 17 00:00:00 2001 From: John Joyce Date: Tue, 23 Jan 2024 15:34:09 -0800 Subject: [PATCH 08/25] Add support for fetching incidents from GraphQL, along with mutations required inside incidents tab --- datahub-web-react/src/graphql/chart.graphql | 6 ++++++ datahub-web-react/src/graphql/dashboard.graphql | 3 +++ datahub-web-react/src/graphql/dataFlow.graphql | 6 ++++++ datahub-web-react/src/graphql/dataset.graphql | 8 ++++---- datahub-web-react/src/graphql/fragments.graphql | 16 ++++++++++++++++ datahub-web-react/src/graphql/mutations.graphql | 8 ++++++++ datahub-web-react/src/graphql/preview.graphql | 15 +++++++++++++++ datahub-web-react/src/graphql/search.graphql | 17 +++++++++++++---- 8 files changed, 71 insertions(+), 8 deletions(-) diff --git a/datahub-web-react/src/graphql/chart.graphql b/datahub-web-react/src/graphql/chart.graphql index ad6028b47560c..0f9b326b2374d 100644 --- a/datahub-web-react/src/graphql/chart.graphql +++ b/datahub-web-react/src/graphql/chart.graphql @@ -111,6 +111,12 @@ query getChart($urn: String!) { ...structuredPropertiesFields } } + health { + ...entityHealth + } + activeIncidents: incidents(start: 0, count: 1, state: ACTIVE) { + total + } } } diff --git a/datahub-web-react/src/graphql/dashboard.graphql b/datahub-web-react/src/graphql/dashboard.graphql index 39d23c6953a44..9f4ee0401335b 100644 --- a/datahub-web-react/src/graphql/dashboard.graphql +++ b/datahub-web-react/src/graphql/dashboard.graphql @@ -15,6 +15,9 @@ query getDashboard($urn: String!) 
{ ...structuredPropertiesFields } } + activeIncidents: incidents(start: 0, count: 1, state: ACTIVE) { + total + } } } diff --git a/datahub-web-react/src/graphql/dataFlow.graphql b/datahub-web-react/src/graphql/dataFlow.graphql index ab1a64e856736..2aec7dc4242d8 100644 --- a/datahub-web-react/src/graphql/dataFlow.graphql +++ b/datahub-web-react/src/graphql/dataFlow.graphql @@ -50,6 +50,12 @@ fragment dataFlowFields on DataFlow { browsePathV2 { ...browsePathV2Fields } + health { + ...entityHealth + } + activeIncidents: incidents(start: 0, count: 1, state: ACTIVE) { + total + } } query getDataFlow($urn: String!) { diff --git a/datahub-web-react/src/graphql/dataset.graphql b/datahub-web-react/src/graphql/dataset.graphql index e25d4fe6c8635..2fedd2c36e77e 100644 --- a/datahub-web-react/src/graphql/dataset.graphql +++ b/datahub-web-react/src/graphql/dataset.graphql @@ -98,10 +98,7 @@ fragment nonSiblingDatasetFields on Dataset { } } health { - type - status - message - causes + ...entityHealth } assertions(start: 0, count: 1) { total @@ -163,6 +160,9 @@ fragment nonSiblingDatasetFields on Dataset { siblings { isPrimary } + activeIncidents: incidents(start: 0, count: 1, state: ACTIVE) { + total + } privileges { canEditLineage canEditEmbed diff --git a/datahub-web-react/src/graphql/fragments.graphql b/datahub-web-react/src/graphql/fragments.graphql index dc534b315aadf..baac71c91bd02 100644 --- a/datahub-web-react/src/graphql/fragments.graphql +++ b/datahub-web-react/src/graphql/fragments.graphql @@ -401,6 +401,12 @@ fragment dataJobFields on DataJob { subTypes { typeNames } + health { + ...entityHealth + } + activeIncidents: incidents(start: 0, count: 1, state: ACTIVE) { + total + } } fragment dashboardFields on Dashboard { @@ -496,6 +502,9 @@ fragment dashboardFields on Dashboard { canEditLineage canEditEmbed } + health { + ...entityHealth + } } fragment nonRecursiveMLFeature on MLFeature { @@ -1313,3 +1322,10 @@ fragment formPromptAssociationFields on 
FormPromptAssociation { } } } + +fragment entityHealth on Health { + type + status + message + causes +} diff --git a/datahub-web-react/src/graphql/mutations.graphql b/datahub-web-react/src/graphql/mutations.graphql index 077922cee45fb..70ec8beaa4147 100644 --- a/datahub-web-react/src/graphql/mutations.graphql +++ b/datahub-web-react/src/graphql/mutations.graphql @@ -102,6 +102,14 @@ mutation batchUpdateSoftDeleted($input: BatchUpdateSoftDeletedInput!) { batchUpdateSoftDeleted(input: $input) } +mutation raiseIncident($input: RaiseIncidentInput!) { + raiseIncident(input: $input) +} + +mutation updateIncidentStatus($urn: String!, $input: UpdateIncidentStatusInput!) { + updateIncidentStatus(urn: $urn, input: $input) +} + mutation batchAssignRole($input: BatchAssignRoleInput!) { batchAssignRole(input: $input) } diff --git a/datahub-web-react/src/graphql/preview.graphql b/datahub-web-react/src/graphql/preview.graphql index 330b78a5ae8f4..daa11b2e627e1 100644 --- a/datahub-web-react/src/graphql/preview.graphql +++ b/datahub-web-react/src/graphql/preview.graphql @@ -39,6 +39,9 @@ fragment entityPreview on Entity { deprecation { ...deprecationFields } + health { + ...entityHealth + } } ... on CorpUser { username @@ -107,6 +110,9 @@ fragment entityPreview on Entity { subTypes { typeNames } + health { + ...entityHealth + } } ... on Chart { urn @@ -148,6 +154,9 @@ fragment entityPreview on Entity { subTypes { typeNames } + health { + ...entityHealth + } } ... on DataFlow { urn @@ -182,6 +191,9 @@ fragment entityPreview on Entity { deprecation { ...deprecationFields } + health { + ...entityHealth + } } ... on DataJob { urn @@ -216,6 +228,9 @@ fragment entityPreview on Entity { subTypes { typeNames } + health { + ...entityHealth + } } ... 
on GlossaryTerm { name diff --git a/datahub-web-react/src/graphql/search.graphql b/datahub-web-react/src/graphql/search.graphql index 7034116f76129..e8694447a0657 100644 --- a/datahub-web-react/src/graphql/search.graphql +++ b/datahub-web-react/src/graphql/search.graphql @@ -372,10 +372,7 @@ fragment nonSiblingsDatasetSearchFields on Dataset { ...deprecationFields } health { - type - status - message - causes + ...entityHealth } ...datasetStatsFields } @@ -502,6 +499,9 @@ fragment searchResultFields on Entity { subTypes { typeNames } + health { + ...entityHealth + } } ... on Chart { chartId @@ -568,6 +568,9 @@ fragment searchResultFields on Entity { subTypes { typeNames } + health { + ...entityHealth + } } ... on DataFlow { flowId @@ -606,6 +609,9 @@ fragment searchResultFields on Entity { childJobs: relationships(input: { types: ["IsPartOf"], direction: INCOMING, start: 0, count: 100 }) { total } + health { + ...entityHealth + } } ... on DataJob { dataFlow { @@ -655,6 +661,9 @@ fragment searchResultFields on Entity { } } } + health { + ...entityHealth + } } ... 
on GlossaryTerm { name From 7087346b09e2282950212f6e1a0e7422b2aa6cdd Mon Sep 17 00:00:00 2001 From: John Joyce Date: Tue, 23 Jan 2024 21:16:13 -0800 Subject: [PATCH 09/25] Support raising and resolving incidents for Datasets with caching via the UI --- datahub-web-react/src/App.tsx | 5 + datahub-web-react/src/app/analytics/event.ts | 2 + .../src/app/entity/chart/ChartEntity.tsx | 11 +- .../app/entity/dashboard/DashboardEntity.tsx | 11 +- .../app/entity/dataFlow/DataFlowEntity.tsx | 11 +- .../src/app/entity/dataJob/DataJobEntity.tsx | 11 +- .../src/app/entity/dataset/DatasetEntity.tsx | 11 +- .../shared/EntityDropdown/EntityDropdown.tsx | 39 ++- .../shared/tabs/Incident/IncidentTab.tsx | 127 +++++++ .../Incident/components/AddIncidentModal.tsx | 178 ++++++++++ .../Incident/components/IncidentListItem.tsx | 309 ++++++++++++++++++ .../Incident/components/IncidentSummary.tsx | 86 +++++ .../components/IncidentsLoadingSection.tsx | 43 +++ .../components/ResolveIncidentModal.tsx | 54 +++ .../shared/tabs/Incident/incidentUtils.ts | 138 ++++++++ .../src/graphql/incident.graphql | 75 +++++ 16 files changed, 1105 insertions(+), 6 deletions(-) create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Incident/IncidentTab.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Incident/components/AddIncidentModal.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Incident/components/IncidentListItem.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Incident/components/IncidentSummary.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Incident/components/IncidentsLoadingSection.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Incident/components/ResolveIncidentModal.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Incident/incidentUtils.ts create mode 100644 datahub-web-react/src/graphql/incident.graphql diff --git a/datahub-web-react/src/App.tsx 
b/datahub-web-react/src/App.tsx index e8910e7dc2ea8..5be31528fe780 100644 --- a/datahub-web-react/src/App.tsx +++ b/datahub-web-react/src/App.tsx @@ -52,6 +52,11 @@ const client = new ApolloClient({ return { ...oldObj, ...newObj }; }, }, + entity: { + merge: (oldObj, newObj) => { + return { ...oldObj, ...newObj }; + }, + }, }, }, }, diff --git a/datahub-web-react/src/app/analytics/event.ts b/datahub-web-react/src/app/analytics/event.ts index dd670b35d49e0..f8e2534e44c31 100644 --- a/datahub-web-react/src/app/analytics/event.ts +++ b/datahub-web-react/src/app/analytics/event.ts @@ -303,6 +303,8 @@ export const EntityActionType = { UpdateSchemaTags: 'UpdateSchemaTags', UpdateSchemaTerms: 'UpdateSchemaTerms', ClickExternalUrl: 'ClickExternalUrl', + AddIncident: 'AddIncident', + ResolvedIncident: 'ResolvedIncident', }; export interface EntityActionEvent extends BaseEvent { type: EventType.EntityActionEvent; diff --git a/datahub-web-react/src/app/entity/chart/ChartEntity.tsx b/datahub-web-react/src/app/entity/chart/ChartEntity.tsx index d2d35aad7c29f..067e13947c047 100644 --- a/datahub-web-react/src/app/entity/chart/ChartEntity.tsx +++ b/datahub-web-react/src/app/entity/chart/ChartEntity.tsx @@ -27,6 +27,7 @@ import EmbeddedProfile from '../shared/embed/EmbeddedProfile'; import { LOOKER_URN } from '../../ingest/source/builder/constants'; import { MatchedFieldList } from '../../search/matches/MatchedFieldList'; import { matchedInputFieldRenderer } from '../../search/matches/matchedInputFieldRenderer'; +import { IncidentTab } from '../shared/tabs/Incident/IncidentTab'; /** * Definition of the DataHub Chart entity. 
@@ -80,7 +81,7 @@ export class ChartEntity implements Entity { useEntityQuery={useGetChartQuery} useUpdateQuery={useUpdateChartMutation} getOverrideProperties={this.getOverridePropertiesFromEntity} - headerDropdownItems={new Set([EntityMenuItems.UPDATE_DEPRECATION])} + headerDropdownItems={new Set([EntityMenuItems.UPDATE_DEPRECATION, EntityMenuItems.RAISE_INCIDENT])} subHeader={{ component: ChartStatsSummarySubHeader, }} @@ -126,6 +127,14 @@ export class ChartEntity implements Entity { name: 'Properties', component: PropertiesTab, }, + { + name: 'Incidents', + component: IncidentTab, + getDynamicName: (_, chart) => { + const activeIncidentCount = chart?.chart?.activeIncidents.total; + return `Incidents${(activeIncidentCount && ` (${activeIncidentCount})`) || ''}`; + }, + }, ]} sidebarSections={[ { diff --git a/datahub-web-react/src/app/entity/dashboard/DashboardEntity.tsx b/datahub-web-react/src/app/entity/dashboard/DashboardEntity.tsx index 0a36d0e5f1bfa..f8c0ec70dc946 100644 --- a/datahub-web-react/src/app/entity/dashboard/DashboardEntity.tsx +++ b/datahub-web-react/src/app/entity/dashboard/DashboardEntity.tsx @@ -31,6 +31,7 @@ import { getDataProduct } from '../shared/utils'; import { LOOKER_URN } from '../../ingest/source/builder/constants'; import { MatchedFieldList } from '../../search/matches/MatchedFieldList'; import { matchedInputFieldRenderer } from '../../search/matches/matchedInputFieldRenderer'; +import { IncidentTab } from '../shared/tabs/Incident/IncidentTab'; /** * Definition of the DataHub Dashboard entity. 
@@ -84,7 +85,7 @@ export class DashboardEntity implements Entity { useEntityQuery={useGetDashboardQuery} useUpdateQuery={useUpdateDashboardMutation} getOverrideProperties={this.getOverridePropertiesFromEntity} - headerDropdownItems={new Set([EntityMenuItems.UPDATE_DEPRECATION])} + headerDropdownItems={new Set([EntityMenuItems.UPDATE_DEPRECATION, EntityMenuItems.RAISE_INCIDENT])} subHeader={{ component: DashboardStatsSummarySubHeader, }} @@ -134,6 +135,14 @@ export class DashboardEntity implements Entity { name: 'Properties', component: PropertiesTab, }, + { + name: 'Incidents', + component: IncidentTab, + getDynamicName: (_, dashboard) => { + const activeIncidentCount = dashboard?.dashboard?.activeIncidents.total; + return `Incidents${(activeIncidentCount && ` (${activeIncidentCount})`) || ''}`; + }, + }, ]} sidebarSections={[ { diff --git a/datahub-web-react/src/app/entity/dataFlow/DataFlowEntity.tsx b/datahub-web-react/src/app/entity/dataFlow/DataFlowEntity.tsx index 3bf24ac276c8e..832d037ab176a 100644 --- a/datahub-web-react/src/app/entity/dataFlow/DataFlowEntity.tsx +++ b/datahub-web-react/src/app/entity/dataFlow/DataFlowEntity.tsx @@ -18,6 +18,7 @@ import { EntityMenuItems } from '../shared/EntityDropdown/EntityDropdown'; import { capitalizeFirstLetterOnly } from '../../shared/textUtil'; import DataProductSection from '../shared/containers/profile/sidebar/DataProduct/DataProductSection'; import { getDataProduct } from '../shared/utils'; +import { IncidentTab } from '../shared/tabs/Incident/IncidentTab'; /** * Definition of the DataHub DataFlow entity. 
@@ -65,7 +66,7 @@ export class DataFlowEntity implements Entity { useEntityQuery={useGetDataFlowQuery} useUpdateQuery={useUpdateDataFlowMutation} getOverrideProperties={this.getOverridePropertiesFromEntity} - headerDropdownItems={new Set([EntityMenuItems.UPDATE_DEPRECATION])} + headerDropdownItems={new Set([EntityMenuItems.UPDATE_DEPRECATION, EntityMenuItems.RAISE_INCIDENT])} tabs={[ { name: 'Documentation', @@ -79,6 +80,14 @@ export class DataFlowEntity implements Entity { name: 'Tasks', component: DataFlowJobsTab, }, + { + name: 'Incidents', + component: IncidentTab, + getDynamicName: (_, dataFlow) => { + const activeIncidentCount = dataFlow?.dataFlow?.activeIncidents.total; + return `Incidents${(activeIncidentCount && ` (${activeIncidentCount})`) || ''}`; + }, + }, ]} sidebarSections={[ { diff --git a/datahub-web-react/src/app/entity/dataJob/DataJobEntity.tsx b/datahub-web-react/src/app/entity/dataJob/DataJobEntity.tsx index 3229bc37706a4..c1745d778f6fb 100644 --- a/datahub-web-react/src/app/entity/dataJob/DataJobEntity.tsx +++ b/datahub-web-react/src/app/entity/dataJob/DataJobEntity.tsx @@ -21,6 +21,7 @@ import { DataFlowEntity } from '../dataFlow/DataFlowEntity'; import { capitalizeFirstLetterOnly } from '../../shared/textUtil'; import DataProductSection from '../shared/containers/profile/sidebar/DataProduct/DataProductSection'; import { getDataProduct } from '../shared/utils'; +import { IncidentTab } from '../shared/tabs/Incident/IncidentTab'; const getDataJobPlatformName = (data?: DataJob): string => { return ( @@ -76,7 +77,7 @@ export class DataJobEntity implements Entity { useEntityQuery={useGetDataJobQuery} useUpdateQuery={useUpdateDataJobMutation} getOverrideProperties={this.getOverridePropertiesFromEntity} - headerDropdownItems={new Set([EntityMenuItems.UPDATE_DEPRECATION])} + headerDropdownItems={new Set([EntityMenuItems.UPDATE_DEPRECATION, EntityMenuItems.RAISE_INCIDENT])} tabs={[ { name: 'Documentation', @@ -102,6 +103,14 @@ export class 
DataJobEntity implements Entity { enabled: (_, dataJob: GetDataJobQuery) => (dataJob?.dataJob?.runs?.total || 0) !== 0, }, }, + { + name: 'Incidents', + component: IncidentTab, + getDynamicName: (_, dataJob) => { + const activeIncidentCount = dataJob?.dataJob?.activeIncidents.total; + return `Incidents${(activeIncidentCount && ` (${activeIncidentCount})`) || ''}`; + }, + }, ]} sidebarSections={[ { diff --git a/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx b/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx index 90fac38ebd6b3..e568ed03e43b4 100644 --- a/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx +++ b/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx @@ -34,6 +34,7 @@ import { getDataProduct } from '../shared/utils'; import AccessManagement from '../shared/tabs/Dataset/AccessManagement/AccessManagement'; import { matchedFieldPathsRenderer } from '../../search/matches/matchedFieldPathsRenderer'; import { getLastUpdatedMs } from './shared/utils'; +import { IncidentTab } from '../../entity/shared/tabs/Incident/IncidentTab'; const SUBTYPES = { VIEW: 'view', @@ -93,7 +94,7 @@ export class DatasetEntity implements Entity { useEntityQuery={useGetDatasetQuery} useUpdateQuery={useUpdateDatasetMutation} getOverrideProperties={this.getOverridePropertiesFromEntity} - headerDropdownItems={new Set([EntityMenuItems.UPDATE_DEPRECATION])} + headerDropdownItems={new Set([EntityMenuItems.UPDATE_DEPRECATION, EntityMenuItems.RAISE_INCIDENT])} subHeader={{ component: DatasetStatsSummarySubHeader, }} @@ -189,6 +190,14 @@ export class DatasetEntity implements Entity { enabled: (_, _2) => true, }, }, + { + name: 'Incidents', + component: IncidentTab, + getDynamicName: (_, dataset) => { + const activeIncidentCount = dataset?.dataset?.activeIncidents.total; + return `Incidents${(activeIncidentCount && ` (${activeIncidentCount})`) || ''}`; + }, + }, ]} sidebarSections={this.getSidebarSections()} /> diff --git 
a/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx index 664a77a731d34..2856a219c435d 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx @@ -10,8 +10,9 @@ import { MoreOutlined, PlusOutlined, CopyOutlined, + WarningOutlined, } from '@ant-design/icons'; -import { Redirect } from 'react-router'; +import { Redirect, useHistory } from 'react-router'; import { EntityType } from '../../../../types.generated'; import CreateGlossaryEntityModal from './CreateGlossaryEntityModal'; import { UpdateDeprecationModal } from './UpdateDeprecationModal'; @@ -25,6 +26,9 @@ import { shouldDisplayChildDeletionWarning, isDeleteDisabled, isMoveDisabled } f import { useUserContext } from '../../../context/useUserContext'; import MoveDomainModal from './MoveDomainModal'; import { useIsNestedDomainsEnabled } from '../../../useAppConfig'; +import { getEntityPath } from '../containers/profile/utils'; +import { useIsSeparateSiblingsMode } from '../siblingUtils'; +import { AddIncidentModal } from '../tabs/Incident/components/AddIncidentModal'; export enum EntityMenuItems { COPY_URL, @@ -34,6 +38,7 @@ export enum EntityMenuItems { DELETE, MOVE, CLONE, + RAISE_INCIDENT, } export const MenuIcon = styled(MoreOutlined)<{ fontSize?: number }>` @@ -81,6 +86,8 @@ interface Props { } function EntityDropdown(props: Props) { + const history = useHistory(); + const { urn, entityData, @@ -97,6 +104,7 @@ function EntityDropdown(props: Props) { const me = useUserContext(); const entityRegistry = useEntityRegistry(); const [updateDeprecation] = useUpdateDeprecationMutation(); + const isHideSiblingMode = useIsSeparateSiblingsMode(); const isNestedDomainsEnabled = useIsNestedDomainsEnabled(); const { onDeleteEntity, hasBeenDeleted } = useDeleteEntity( urn, @@ -112,6 +120,7 @@ function 
EntityDropdown(props: Props) { const [isCloneEntityModalVisible, setIsCloneEntityModalVisible] = useState(false); const [isDeprecationModalVisible, setIsDeprecationModalVisible] = useState(false); const [isMoveModalVisible, setIsMoveModalVisible] = useState(false); + const [isRaiseIncidentModalVisible, setIsRaiseIncidentModalVisible] = useState(false); const handleUpdateDeprecation = async (deprecatedStatus: boolean) => { message.loading({ content: 'Updating...' }); @@ -245,6 +254,13 @@ function EntityDropdown(props: Props) { )} + {menuItems.has(EntityMenuItems.RAISE_INCIDENT) && ( + + setIsRaiseIncidentModalVisible(true)}> +  Raise Incident + + + )} } trigger={['click']} @@ -285,6 +301,27 @@ function EntityDropdown(props: Props) { )} {isMoveModalVisible && isDomainEntity && setIsMoveModalVisible(false)} />} {hasBeenDeleted && !onDelete && deleteRedirectPath && } + {isRaiseIncidentModalVisible && ( + setIsRaiseIncidentModalVisible(false)} + refetch={ + (() => { + refetchForEntity?.(); + history.push( + `${getEntityPath( + entityType, + urn, + entityRegistry, + false, + isHideSiblingMode, + 'Incidents', + )}`, + ); + }) as any + } + /> + )} ); } diff --git a/datahub-web-react/src/app/entity/shared/tabs/Incident/IncidentTab.tsx b/datahub-web-react/src/app/entity/shared/tabs/Incident/IncidentTab.tsx new file mode 100644 index 0000000000000..f3cfd6dc148c7 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Incident/IncidentTab.tsx @@ -0,0 +1,127 @@ +import React, { useState } from 'react'; +import styled from 'styled-components'; +import { Button, Empty, List, Select, Typography } from 'antd'; +import { PlusOutlined } from '@ant-design/icons'; +import { useGetEntityIncidentsQuery } from '../../../../../graphql/incident.generated'; +import TabToolbar from '../../components/styled/TabToolbar'; +import { useEntityData } from '../../EntityContext'; +import IncidentListItem from './components/IncidentListItem'; +import { INCIDENT_DISPLAY_STATES, PAGE_SIZE, 
getIncidentsStatusSummary } from './incidentUtils'; +import { EntityType, Incident, IncidentState } from '../../../../../types.generated'; +import { IncidentSummary } from './components/IncidentSummary'; +import { AddIncidentModal } from './components/AddIncidentModal'; +import { combineEntityDataWithSiblings } from '../../siblingUtils'; +import { IncidentsLoadingSection } from './components/IncidentsLoadingSection'; +import { ANTD_GRAY } from '../../constants'; + +const Header = styled.div` + border-bottom: 1px solid ${ANTD_GRAY[3]}; + box-shadow: ${(props) => props.theme.styles['box-shadow']}; +`; + +const Summary = styled.div` + display: flex; + align-items: center; + justify-content: space-between; +`; + +const IncidentList = styled.div` + flex: 1; + height: 100%; + overflow: scroll; +`; + +const IncidentStyledList = styled(List)` + &&& { + width: 100%; + border-color: ${(props) => props.theme.styles['border-color-base']}; + flex: 1; + } +`; + +const IncidentStateSelect = styled(Select)` + width: 100px; + margin: 0px 40px; +`; + +export const IncidentTab = () => { + const { urn, entityType } = useEntityData(); + const incidentStates = INCIDENT_DISPLAY_STATES; + const [selectedIncidentState, setSelectedIncidentState] = useState(IncidentState.Active); + const [isRaiseIncidentModalVisible, setIsRaiseIncidentModalVisible] = useState(false); + + // Fetch filtered incidents. 
+ const { loading, data, refetch } = useGetEntityIncidentsQuery({ + variables: { + urn, + start: 0, + count: PAGE_SIZE, + }, + fetchPolicy: 'cache-first', + }); + + const hasData = (data?.entity as any)?.incidents; + const combinedData = (entityType === EntityType.Dataset && combineEntityDataWithSiblings(data)) || data; + const allIncidents = + (combinedData && (combinedData as any).entity?.incidents?.incidents?.map((incident) => incident as Incident)) || + []; + const filteredIncidents = allIncidents.filter( + (incident) => !selectedIncidentState || incident.status?.state === selectedIncidentState, + ); + const incidentList = filteredIncidents?.map((incident) => ({ + urn: incident?.urn, + created: incident.created, + customType: incident.customType, + description: incident.description, + status: incident.status, + type: incident?.incidentType, + title: incident?.title, + })); + + return ( + <> +

+ + + setIsRaiseIncidentModalVisible(false)} + /> + + + + setSelectedIncidentState(newState)} + autoFocus + > + {incidentStates.map((incidentType) => { + return ( + + {incidentType.name} + + ); + })} + + +
+ + {(loading && !hasData && ) || null} + {hasData && ( + + , + }} + dataSource={incidentList} + renderItem={(item: any) => } + /> + + )} + + ); +}; diff --git a/datahub-web-react/src/app/entity/shared/tabs/Incident/components/AddIncidentModal.tsx b/datahub-web-react/src/app/entity/shared/tabs/Incident/components/AddIncidentModal.tsx new file mode 100644 index 0000000000000..8d8ea01bf0685 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Incident/components/AddIncidentModal.tsx @@ -0,0 +1,178 @@ +import React, { useState } from 'react'; +import { message, Modal, Button, Form, Input, Typography, Select } from 'antd'; +import { useApolloClient } from '@apollo/client'; +import analytics, { EventType, EntityActionType } from '../../../../../analytics'; +import { useEntityData } from '../../../EntityContext'; +import { EntityType, IncidentSourceType, IncidentState, IncidentType } from '../../../../../../types.generated'; +import { INCIDENT_DISPLAY_TYPES, PAGE_SIZE, addActiveIncidentToCache } from '../incidentUtils'; +import { useRaiseIncidentMutation } from '../../../../../../graphql/mutations.generated'; +import handleGraphQLError from '../../../../../shared/handleGraphQLError'; +import { useUserContext } from '../../../../../context/useUserContext'; + +type AddIncidentProps = { + visible: boolean; + onClose?: () => void; + refetch?: () => Promise; +}; + +export const AddIncidentModal = ({ visible, onClose, refetch }: AddIncidentProps) => { + const { urn, entityType } = useEntityData(); + const { user } = useUserContext(); + const incidentTypes = INCIDENT_DISPLAY_TYPES; + const [selectedIncidentType, setSelectedIncidentType] = useState(IncidentType.Operational); + const [isOtherTypeSelected, setIsOtherTypeSelected] = useState(false); + const [raiseIncidentMutation] = useRaiseIncidentMutation(); + + const client = useApolloClient(); + const [form] = Form.useForm(); + + const handleClose = () => { + form.resetFields(); + setIsOtherTypeSelected(false); + 
onClose?.(); + }; + + const onSelectIncidentType = (newType) => { + if (newType === 'OTHER') { + setIsOtherTypeSelected(true); + setSelectedIncidentType(IncidentType.Custom); + } else { + setIsOtherTypeSelected(false); + setSelectedIncidentType(newType); + } + }; + + const handleAddIncident = async (formData: any) => { + raiseIncidentMutation({ + variables: { + input: { + type: selectedIncidentType, + title: formData.title, + description: formData.description, + resourceUrn: urn, + customType: formData.customType, + }, + }, + }) + .then(({ data }) => { + const newIncident = { + urn: data?.raiseIncident, + type: EntityType.Incident, + incidentType: selectedIncidentType, + customType: formData.customType || null, + title: formData.title, + description: formData.description, + status: { + state: IncidentState.Active, + message: null, + lastUpdated: { + __typename: 'AuditStamp', + time: Date.now(), + actor: user?.urn, + }, + }, + source: { + type: IncidentSourceType.Manual, + }, + created: { + time: Date.now(), + actor: user?.urn, + }, + }; + message.success({ content: 'Incident Added', duration: 2 }); + analytics.event({ + type: EventType.EntityActionEvent, + entityType, + entityUrn: urn, + actionType: EntityActionType.AddIncident, + }); + addActiveIncidentToCache(client, urn, newIncident, PAGE_SIZE); + handleClose(); + }) + .catch((error) => { + console.error(error); + handleGraphQLError({ + error, + defaultMessage: 'Failed to raise incident! An unexpected error occurred', + permissionMessage: + 'Unauthorized to raise incident for this asset. Please contact your DataHub administrator.', + }); + }); + }; + + return ( + <> + + Cancel + , + , + ]} + > +
+ Type}> + + + + + {isOtherTypeSelected && ( + + + + )} + + + + + + +
+
+ + ); +}; diff --git a/datahub-web-react/src/app/entity/shared/tabs/Incident/components/IncidentListItem.tsx b/datahub-web-react/src/app/entity/shared/tabs/Incident/components/IncidentListItem.tsx new file mode 100644 index 0000000000000..c1efc42c5571d --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Incident/components/IncidentListItem.tsx @@ -0,0 +1,309 @@ +import React, { useState } from 'react'; +import styled from 'styled-components'; +import { Button, Dropdown, List, Menu, message, Popover, Tag, Tooltip, Typography } from 'antd'; +import { CheckCircleFilled, CheckOutlined, MoreOutlined, WarningFilled } from '@ant-design/icons'; +import { Link } from 'react-router-dom'; +import { useApolloClient } from '@apollo/client'; +import { EntityType, IncidentState, IncidentType } from '../../../../../../types.generated'; +import { FAILURE_COLOR_HEX, getNameFromType, SUCCESS_COLOR_HEX } from '../incidentUtils'; +import { useGetUserQuery } from '../../../../../../graphql/user.generated'; +import { useEntityRegistry } from '../../../../../useEntityRegistry'; +import { toLocalDateTimeString, toRelativeTimeString } from '../../../../../shared/time/timeUtils'; +import { useEntityData, useRefetch } from '../../../EntityContext'; +import analytics, { EntityActionType, EventType } from '../../../../../analytics'; +import { useUpdateIncidentStatusMutation } from '../../../../../../graphql/mutations.generated'; +import { ResolveIncidentModal } from './ResolveIncidentModal'; +import handleGraphQLError from '../../../../../shared/handleGraphQLError'; + +type Props = { + incident: any; + refetch?: () => Promise; +}; + +const IncidentListContainer = styled(List.Item)` + padding-top: 20px; + padding-bottom: 20px; +`; + +const IncidentItemContainer = styled.div` + display: flex; + justify-content: space-between; + padding-left: 8px; + padding-right: 8px; + width: 100%; +`; + +const IncidentHeaderContainer = styled.div` + display: flex; + justify-content: left; + 
align-items: center; +`; + +const DescriptionContainer = styled.div` + margin-top: 8px; +`; + +const TitleContainer = styled.div` + display: flex; + align-items: center; +`; + +const IncidentTitle = styled(Typography.Text)` + font-size: 14px; + font-weight: 700; + margin-right: 16px; + color: #000000; + line-height: 22px; + text-align: justify; + max-width: 500px; +`; + +const IncidentTypeTag = styled(Tag)` + width: auto; + height: 26px; + text-align: center; + font-weight: 500; + font-size: 12px; + line-height: 20px; + color: #262626; + padding: 2px 15px; + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; + max-width: 500px; +`; + +const IncidentDescriptionText = styled(Typography.Text)` + max-width: 500px; + font-weight: 500; + font-size: 12px; + line-height: 20px; + color: #262626; + display: block; + word-wrap: break-word; + white-space: normal; + text-align: justify; +`; + +const IncidentCreatedTime = styled(Typography.Text)` + font-weight: 500; + font-size: 12px; + line-height: 20px; + color: #8c8c8c; +`; + +const IncidentResolvedText = styled(Typography.Text)` + font-weight: 500; + font-size: 12px; + line-height: 20px; + color: #8c8c8c; +`; + +const IncidentResolvedTextContainer = styled.div` + display: flex; + align-items: center; +`; + +const IncidentResolvedContainer = styled.div` + display: flex; + align-items: center; + margin-right: 30px; +`; + +const IncidentResolvedButton = styled(Button)` + background: #ffffff; + border: 1px solid #d9d9d9; + box-sizing: border-box; + box-shadow: 0px 0px 4px rgba(0, 0, 0, 0.1); + border-radius: 5px; + color: #262626; + font-weight: 500; + font-size: 12px; + line-height: 20px; +`; + +const MenuIcon = styled(MoreOutlined)` + display: flex; + justify-content: center; + align-items: center; + font-size: 25px; + height: 28px; + margin-left: 5px; +`; + +const MenuItem = styled.div` + font-size: 12px; + padding-left: 12px; + padding-right: 12px; + color: rgba(0, 0, 0, 0.85); +`; + +export default function 
IncidentListItem({ incident, refetch }: Props) { + const { entityType } = useEntityData(); + const refetchEntity = useRefetch(); + const entityRegistry = useEntityRegistry(); + const [updateIncidentStatusMutation] = useUpdateIncidentStatusMutation(); + const [isResolvedModalVisible, setIsResolvedModalVisible] = useState(false); + + const client = useApolloClient(); + + // Fetching the most recent actor's data. + const { data: createdActor } = useGetUserQuery({ + variables: { urn: incident.created.actor, groupsCount: 0 }, + fetchPolicy: 'cache-first', + }); + const { data: lastUpdatedActor } = useGetUserQuery({ + variables: { urn: incident.status.lastUpdated.actor, groupsCount: 0 }, + fetchPolicy: 'cache-first', + }); + + // Converting the created time into UTC + const createdDate = incident.created.time && new Date(incident.created.time); + const lastModifiedDate = incident.status.lastUpdated.time && new Date(incident.status.lastUpdated.time); + + // Updating the incident status on button click + const updateIncidentStatus = (state: IncidentState, resolvedMessage: string) => { + message.loading({ content: 'Updating...' }); + updateIncidentStatusMutation({ + variables: { urn: incident.urn, input: { state, message: resolvedMessage } }, + }) + .then(() => { + message.destroy(); + analytics.event({ + type: EventType.EntityActionEvent, + entityType, + entityUrn: incident.urn, + actionType: EntityActionType.ResolvedIncident, + }); + message.success({ content: 'Incident updated! .', duration: 2 }); + refetchEntity?.(); + refetch?.(); + setIsResolvedModalVisible(false); + }) + .catch((error) => { + handleGraphQLError({ + error, + defaultMessage: 'Failed to update incident! An unexpected error occurred', + permissionMessage: + 'Unauthorized to update incident for this asset. 
Please contact your DataHub administrator.', + }); + }); + }; + + // Handle the Resolved Modal visibility + const handleResolved = () => { + setIsResolvedModalVisible(!isResolvedModalVisible); + }; + + const menu = ( + + + updateIncidentStatus(IncidentState.Active, '')} data-testid="reopen-incident"> + Reopen incident + + + + ); + + return ( + <> + + + +
+ + {incident.title} + + {incident.type === IncidentType.Custom + ? incident.customType + : getNameFromType(incident.type)} + + + + {incident?.description} + + + + + Created {toRelativeTimeString(createdDate)} by{' '} + + + {createdActor?.corpUser && ( + + {entityRegistry.getDisplayName(EntityType.CorpUser, createdActor?.corpUser)} + + )} + +
+
+ {incident.status.state === IncidentState.Resolved ? ( + + Note} + content={ + incident?.status.message === null ? ( + No additional details + ) : ( + {incident?.status.message} + ) + } + > + + {incident?.status.lastUpdated && ( + + Resolved {toRelativeTimeString(lastModifiedDate)} by{' '} + + )} + {lastUpdatedActor?.corpUser && ( + + {entityRegistry.getDisplayName( + EntityType.CorpUser, + lastUpdatedActor?.corpUser, + )} + + )} + + + + + + + + ) : ( + + } + onClick={() => handleResolved()} + data-testid="resolve-incident" + > + Resolve + + + + )} +
+
+ {isResolvedModalVisible && ( + + )} + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Incident/components/IncidentSummary.tsx b/datahub-web-react/src/app/entity/shared/tabs/Incident/components/IncidentSummary.tsx new file mode 100644 index 0000000000000..3b178455c9477 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Incident/components/IncidentSummary.tsx @@ -0,0 +1,86 @@ +import React from 'react'; +import { CheckCircleFilled, WarningFilled, StopOutlined } from '@ant-design/icons'; +import { Typography } from 'antd'; +import styled from 'styled-components'; +import { ANTD_GRAY } from '../../../constants'; +import { SUCCESS_COLOR_HEX, FAILURE_COLOR_HEX, PAGE_SIZE } from '../incidentUtils'; + +const SummaryHeader = styled.div` + width: 100%; + height: 80px; + padding-left: 40px; + padding-top: 0px; + display: flex; + align-items: center; + padding-top: 20px; + padding-bottom: 20px; +`; + +const SummaryContainer = styled.div``; + +const SummaryMessage = styled.div` + display: inline-block; + margin-left: 20px; +`; + +const SummaryTitle = styled(Typography.Title)` + && { + padding-bottom: 0px; + margin-bottom: 0px; + } +`; + +export type IncidentsSummary = { + totalIncident: number; + resolvedIncident: number; + activeIncident: number; +}; + +type Props = { + summary: IncidentsSummary; +}; + +const getSummaryIcon = (summary: IncidentsSummary) => { + if (summary.totalIncident === 0) { + return ; + } + if (summary.resolvedIncident === summary.totalIncident) { + return ; + } + return ; +}; + +const getSummaryMessage = (summary: IncidentsSummary) => { + if (summary.totalIncident === 0) { + return 'No Incidents'; + } + if (summary.resolvedIncident === summary.totalIncident) { + return 'There are no active incidents'; + } + if (summary.activeIncident === 1) { + return `There is ${summary.activeIncident} active incident`; + } + if (summary.activeIncident > 1 && summary.activeIncident <= PAGE_SIZE) { + return `There are 
${summary.activeIncident} active incidents`; + } + return null; +}; + +export const IncidentSummary = ({ summary }: Props) => { + const summaryIcon = getSummaryIcon(summary); + const summaryMessage = getSummaryMessage(summary); + const subtitleMessage = `${summary.activeIncident} active incidents, ${summary.resolvedIncident} resolved incidents`; + return ( + + +
+ {summaryIcon} + + {summaryMessage} + {subtitleMessage} + +
+
+
+ );
+};
diff --git a/datahub-web-react/src/app/entity/shared/tabs/Incident/components/IncidentsLoadingSection.tsx b/datahub-web-react/src/app/entity/shared/tabs/Incident/components/IncidentsLoadingSection.tsx
new file mode 100644
index 0000000000000..fd5d31ac9e981
--- /dev/null
+++ b/datahub-web-react/src/app/entity/shared/tabs/Incident/components/IncidentsLoadingSection.tsx
@@ -0,0 +1,43 @@
+import React from 'react';
+import { Divider, Skeleton } from 'antd';
+import styled from 'styled-components';
+
+// Vertical stack of placeholder cards shown while the incidents query loads.
+const Container = styled.div`
+    display: flex;
+    flex-direction: column;
+    justify-content: start;
+    gap: 12px;
+    margin-top: 20px;
+`;
+
+const CardWrapper = styled.div`
+    padding: 0px 40px;
+`;
+
+// antd Skeleton.Input restyled into a full-width, 100px rounded card; `&&`
+// bumps specificity over antd's own component styles.
+const CardSkeleton = styled(Skeleton.Input)`
+    && {
+        padding: 0px 20px 20px 0px;
+        height: 100px;
+        border-radius: 8px;
+        width: 100%;
+    }
+`;
+
+// Loading placeholder for the Incidents tab.
+// NOTE(review): the CardSkeleton/Divider JSX lost its element tags in this
+// patch text — only blank child lines remain below; restore the markup from
+// the original file before applying this patch.
+export const IncidentsLoadingSection = () => {
+    return (
+        
+ 
+ 
+ 
+ 
+ 
+ 
+ 
+ 
+ 
+ 
+ 
+    );
+};
diff --git a/datahub-web-react/src/app/entity/shared/tabs/Incident/components/ResolveIncidentModal.tsx b/datahub-web-react/src/app/entity/shared/tabs/Incident/components/ResolveIncidentModal.tsx
new file mode 100644
index 0000000000000..b915ef0a76a3f
--- /dev/null
+++ b/datahub-web-react/src/app/entity/shared/tabs/Incident/components/ResolveIncidentModal.tsx
@@ -0,0 +1,54 @@
+import React from 'react';
+import { Modal, Button, Form, Input } from 'antd';
+import { IncidentState } from '../../../../../../types.generated';
+
+const { TextArea } = Input;
+
+// Props for the resolve-incident dialog.
+// NOTE(review): the type is named AddIncidentProps but it is consumed by
+// ResolveIncidentModal — consider renaming for clarity.
+type AddIncidentProps = {
+    handleResolved: () => void;
+    isResolvedModalVisible: boolean;
+    updateIncidentStatus: (state: IncidentState, resolvedMessage: string) => void;
+};
+
+// Modal that collects an optional resolution note and reports it back through
+// the updateIncidentStatus callback supplied by the parent list item.
+// (Definition continues beyond this chunk of the patch.)
+export const ResolveIncidentModal = ({
+    handleResolved,
+    isResolvedModalVisible,
+    updateIncidentStatus,
+}: AddIncidentProps) => {
+    const [form] = Form.useForm();
+
+    // Clear any typed message before handing visibility control back to the
+    // parent via handleResolved().
+    const handleClose = () => {
+        form.resetFields();
+        handleResolved();
+    };
+
+    const onResolvedIncident = (formData: any) => {
+ 
updateIncidentStatus(IncidentState.Resolved, formData.message); + handleClose(); + }; + + return ( + <> + + Cancel + , + , + ]} + > +
+ +