From 02156662b5e7f24f3db908d4d19f8d1bb94a32b5 Mon Sep 17 00:00:00 2001
From: Harshal Sheth
Date: Mon, 6 Nov 2023 12:47:07 -0800
Subject: [PATCH 01/10] docs(ingest): clarify adding source guide (#9161)
---
metadata-ingestion/adding-source.md | 32 ++++++++++++++++-------------
1 file changed, 18 insertions(+), 14 deletions(-)
diff --git a/metadata-ingestion/adding-source.md b/metadata-ingestion/adding-source.md
index a0930102c6827..6baddf6b2010d 100644
--- a/metadata-ingestion/adding-source.md
+++ b/metadata-ingestion/adding-source.md
@@ -6,7 +6,7 @@ There are two ways of adding a metadata ingestion source.
2. You are writing the custom source for yourself and are not going to contribute back (yet).
If you are going for case (1) just follow the steps 1 to 9 below. In case you are building it for yourself you can skip
-steps 4-9 (but maybe write tests and docs for yourself as well) and follow the documentation
+steps 4-8 (but maybe write tests and docs for yourself as well) and follow the documentation
on [how to use custom ingestion sources](../docs/how/add-custom-ingestion-source.md)
without forking Datahub.
@@ -27,6 +27,7 @@ from `ConfigModel`. The [file source](./src/datahub/ingestion/source/file.py) is
We use [pydantic](https://pydantic-docs.helpmanual.io) conventions for documenting configuration flags. Use the `description` attribute to write rich documentation for your configuration field.
For example, the following code:
+
```python
from pydantic import Field
from datahub.api.configuration.common import ConfigModel
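```

A minimal sketch of a documented config model, mirroring the imports shown above (the class and field names here are illustrative, not from the repo):

```python
from pydantic import Field

from datahub.api.configuration.common import ConfigModel


class MySourceConfig(ConfigModel):
    # Hypothetical fields -- the `description` text is what the docs
    # generator renders for each configuration flag.
    api_url: str = Field(description="URL of the service to extract metadata from.")
    include_tables: bool = Field(
        default=True,
        description="Whether tables should be ingested.",
    )
```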
@@ -49,12 +50,10 @@ generates the following documentation:
-
:::note
Inline markdown or code snippets are not yet supported for field level documentation.
:::
-
### 2. Set up the reporter
The reporter interface enables the source to report statistics, warnings, failures, and other information about the run.
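For example, a sketch of a custom report, assuming only that `SourceReport` is a dataclass (the counter and method names are hypothetical):

```python
from dataclasses import dataclass

from datahub.ingestion.api.source import SourceReport


@dataclass
class MySourceReport(SourceReport):
    tables_scanned: int = 0  # hypothetical custom statistic

    def report_table_scanned(self) -> None:
        self.tables_scanned += 1
```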
@@ -71,6 +70,8 @@ some [convenience methods](./src/datahub/emitter/mce_builder.py) for commonly us
### 4. Set up the dependencies
+Note: Steps 4-8 are only required if you intend to contribute the source back to the Datahub project.
+
Declare the source's pip dependencies in the `plugins` variable of the [setup script](./setup.py).
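A sketch of what such an entry might look like (the plugin name and its dependency are illustrative, not a real plugin):

```python
from typing import Dict, Set

# In metadata-ingestion/setup.py; "my-source" is a hypothetical plugin.
plugins: Dict[str, Set[str]] = {
    # ... existing plugins ...
    "my-source": {"requests>=2.28"},
}
```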
### 5. Enable discoverability
@@ -119,37 +120,38 @@ from datahub.ingestion.api.decorators import (
@capability(SourceCapability.LINEAGE_COARSE, "Enabled by default")
class FileSource(Source):
"""
-
- The File Source can be used to produce all kinds of metadata from a generic metadata events file.
+
+ The File Source can be used to produce all kinds of metadata from a generic metadata events file.
:::note
Events in this file can be in MCE form or MCP form.
:::
-
+
"""
... source code goes here
```
-
#### 7.2 Write custom documentation
-- Create a copy of [`source-docs-template.md`](./source-docs-template.md) and edit all relevant components.
+- Create a copy of [`source-docs-template.md`](./source-docs-template.md) and edit all relevant components.
- Name the document as `<plugin>.md` and move it to `metadata-ingestion/docs/sources/<platform>/<plugin>.md`. For example, for the Kafka platform, under the `kafka` plugin, move the document to `metadata-ingestion/docs/sources/kafka/kafka.md`.
- Add a quickstart recipe corresponding to the plugin under `metadata-ingestion/docs/sources/<platform>/<plugin>_recipe.yml`. For example, for the Kafka platform, under the `kafka` plugin, there is a quickstart recipe located at `metadata-ingestion/docs/sources/kafka/kafka_recipe.yml`.
- To write platform-specific documentation (that is cross-plugin), write the documentation under `metadata-ingestion/docs/sources/<platform>/README.md`. For example, cross-plugin documentation for the BigQuery platform is located under `metadata-ingestion/docs/sources/bigquery/README.md`.
#### 7.3 Viewing the Documentation
-Documentation for the source can be viewed by running the documentation generator from the `docs-website` module.
+Documentation for the source can be viewed by running the documentation generator from the `docs-website` module.
##### Step 1: Build the Ingestion docs
+
```console
# From the root of DataHub repo
./gradlew :metadata-ingestion:docGen
```
If this finishes successfully, you will see output messages like:
+
```console
Ingestion Documentation Generation Complete
############################################
@@ -170,7 +172,8 @@ Ingestion Documentation Generation Complete
You can also find documentation files generated at `./docs/generated/ingestion/sources` relative to the root of the DataHub repo. You should be able to locate your specific source's markdown file here and investigate it to make sure things look as expected.
#### Step 2: Build the Entire Documentation
-To view how this documentation looks in the browser, there is one more step. Just build the entire docusaurus page from the `docs-website` module.
+
+To view how this documentation looks in the browser, there is one more step. Just build the entire docusaurus page from the `docs-website` module.
```console
# From the root of DataHub repo
@@ -178,6 +181,7 @@ To view how this documentation looks in the browser, there is one more step. Jus
```
This will generate messages like:
+
```console
...
> Task :docs-website:yarnGenerate
@@ -219,15 +223,15 @@ BUILD SUCCESSFUL in 35s
36 actionable tasks: 16 executed, 20 up-to-date
```
-After this you need to run the following script from the `docs-website` module.
+After this you need to run the following script from the `docs-website` module.
+
```console
cd docs-website
npm run serve
```
-Now, browse to http://localhost:3000 or whichever port npm is running on, to browse the docs.
-Your source should show up on the left sidebar under `Metadata Ingestion / Sources`.
-
+Now, browse to http://localhost:3000 or whichever port npm is running on, to browse the docs.
+Your source should show up on the left sidebar under `Metadata Ingestion / Sources`.
### 8. Add SQL Alchemy mapping (if applicable)
From 4a4c29030c0cfd2da9eab01798bc74a94fbb8c1d Mon Sep 17 00:00:00 2001
From: Harshal Sheth
Date: Mon, 6 Nov 2023 12:47:24 -0800
Subject: [PATCH 02/10] chore: stop ingestion-smoke CI errors on forks (#9160)
---
.github/workflows/docker-ingestion-smoke.yml | 1 +
1 file changed, 1 insertion(+)
diff --git a/.github/workflows/docker-ingestion-smoke.yml b/.github/workflows/docker-ingestion-smoke.yml
index 8d52c23792857..82b57d23609a5 100644
--- a/.github/workflows/docker-ingestion-smoke.yml
+++ b/.github/workflows/docker-ingestion-smoke.yml
@@ -47,6 +47,7 @@ jobs:
name: Build and Push Docker Image to Docker Hub
runs-on: ubuntu-latest
needs: setup
+ if: ${{ needs.setup.outputs.publish == 'true' }}
steps:
- name: Check out the repo
uses: actions/checkout@v3
From 86d2b08d2bbecc90e9adffd250c894abe54667e7 Mon Sep 17 00:00:00 2001
From: Harshal Sheth
Date: Mon, 6 Nov 2023 12:58:07 -0800
Subject: [PATCH 03/10] docs(ingest): inherit capabilities from superclasses
(#9174)
---
metadata-ingestion-modules/airflow-plugin/setup.py | 4 ++++
.../src/datahub/ingestion/api/decorators.py | 12 +++++++++++-
.../source/state/stateful_ingestion_base.py | 8 +++++++-
3 files changed, 22 insertions(+), 2 deletions(-)
diff --git a/metadata-ingestion-modules/airflow-plugin/setup.py b/metadata-ingestion-modules/airflow-plugin/setup.py
index a5af881022d8c..e88fc870cb333 100644
--- a/metadata-ingestion-modules/airflow-plugin/setup.py
+++ b/metadata-ingestion-modules/airflow-plugin/setup.py
@@ -101,6 +101,10 @@ def get_long_description():
f"acryl-datahub[testing-utils]{_self_pin}",
# Extra requirements for loading our test dags.
"apache-airflow[snowflake]>=2.0.2",
+ # Connexion's new version breaks Airflow:
+ # See https://github.com/apache/airflow/issues/35234.
+ # TODO: We should transition to using Airflow's constraints file.
+ "connexion<3",
# https://github.com/snowflakedb/snowflake-sqlalchemy/issues/350
# Eventually we want to set this to "snowflake-sqlalchemy>=1.4.3".
# However, that doesn't work with older versions of Airflow. Instead
diff --git a/metadata-ingestion/src/datahub/ingestion/api/decorators.py b/metadata-ingestion/src/datahub/ingestion/api/decorators.py
index 5e4427047104f..b390ffb9dd036 100644
--- a/metadata-ingestion/src/datahub/ingestion/api/decorators.py
+++ b/metadata-ingestion/src/datahub/ingestion/api/decorators.py
@@ -93,10 +93,20 @@ def capability(
"""
def wrapper(cls: Type) -> Type:
- if not hasattr(cls, "__capabilities"):
+ if not hasattr(cls, "__capabilities") or any(
+ # The attribute is inherited from a superclass, not defined on this class.
+ cls.__capabilities is getattr(base, "__capabilities", None)
+ for base in cls.__bases__
+ ):
cls.__capabilities = {}
cls.get_capabilities = lambda: cls.__capabilities.values()
+ # If the superclasses have capability annotations, copy those over.
+ for base in cls.__bases__:
+ base_caps = getattr(base, "__capabilities", None)
+ if base_caps:
+ cls.__capabilities.update(base_caps)
+
cls.__capabilities[capability_name] = CapabilitySetting(
capability=capability_name, description=description, supported=supported
)
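The identity check against each base matters because plain `hasattr` cannot distinguish a class's own `__capabilities` dict from one inherited by reference. A standalone illustration (plain Python, not DataHub code):

```python
class Base:
    capabilities = {"lineage": "supported"}


class Child(Base):
    pass


assert hasattr(Child, "capabilities")            # True, but only inherited
assert Child.capabilities is Base.capabilities   # the very same dict object
# Without the identity check, registering a capability on Child would mutate
# Base's dict too; the decorator instead creates a fresh dict and copies the
# superclass entries into it.
```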
diff --git a/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py b/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py
index 7fb2cf9813cab..d11b1f9ad6a53 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py
@@ -15,11 +15,12 @@
from datahub.configuration.time_window_config import BaseTimeWindowConfig
from datahub.configuration.validate_field_rename import pydantic_renamed_field
from datahub.ingestion.api.common import PipelineContext
+from datahub.ingestion.api.decorators import capability
from datahub.ingestion.api.ingestion_job_checkpointing_provider_base import (
IngestionCheckpointingProviderBase,
JobId,
)
-from datahub.ingestion.api.source import Source, SourceReport
+from datahub.ingestion.api.source import Source, SourceCapability, SourceReport
from datahub.ingestion.source.state.checkpoint import Checkpoint, StateType
from datahub.ingestion.source.state.use_case_handler import (
StatefulIngestionUsecaseHandlerBase,
@@ -177,6 +178,11 @@ class StatefulIngestionReport(SourceReport):
pass
+@capability(
+ SourceCapability.DELETION_DETECTION,
+ "Optionally enabled via `stateful_ingestion.remove_stale_metadata`",
+ supported=True,
+)
class StatefulIngestionSourceBase(Source):
"""
Defines the base class for all stateful sources.
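With this change, any source deriving from `StatefulIngestionSourceBase` should report `DELETION_DETECTION` alongside its own annotations. A sketch of the expected behavior (the subclass and its declared capability are illustrative):

```python
from datahub.ingestion.api.decorators import capability
from datahub.ingestion.api.source import SourceCapability
from datahub.ingestion.source.state.stateful_ingestion_base import (
    StatefulIngestionSourceBase,
)


@capability(SourceCapability.LINEAGE_COARSE, "Enabled by default")
class MyStatefulSource(StatefulIngestionSourceBase):
    ...


caps = {setting.capability for setting in MyStatefulSource.get_capabilities()}
assert SourceCapability.DELETION_DETECTION in caps  # inherited from the base
assert SourceCapability.LINEAGE_COARSE in caps      # declared on the subclass
```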
From 2c58c63780970606e50ba95b382dc9ffbde17bfc Mon Sep 17 00:00:00 2001
From: Andrew Sikowitz
Date: Mon, 6 Nov 2023 15:58:57 -0500
Subject: [PATCH 04/10] fix(ingest/datahub-source): Order by version in memory
(#9185)
---
.../source/datahub/datahub_database_reader.py | 100 ++++++++++++++----
.../tests/unit/test_datahub_source.py | 51 +++++++++
2 files changed, 133 insertions(+), 18 deletions(-)
create mode 100644 metadata-ingestion/tests/unit/test_datahub_source.py
diff --git a/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_database_reader.py b/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_database_reader.py
index 96184d8d445e4..e4f1bb275487e 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_database_reader.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_database_reader.py
@@ -1,9 +1,11 @@
import json
import logging
from datetime import datetime
-from typing import Dict, Iterable, Optional, Tuple
+from typing import Any, Generic, Iterable, List, Optional, Tuple, TypeVar
from sqlalchemy import create_engine
+from sqlalchemy.engine import Row
+from typing_extensions import Protocol
from datahub.emitter.aspect import ASPECT_MAP
from datahub.emitter.mcp import MetadataChangeProposalWrapper
@@ -20,6 +22,62 @@
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f"
+class VersionOrderable(Protocol):
+ createdon: Any # Should restrict to only orderable types
+ version: int
+
+
+ROW = TypeVar("ROW", bound=VersionOrderable)
+
+
+class VersionOrderer(Generic[ROW]):
+ """Orders rows by (createdon, version == 0).
+
+ That is, orders rows first by createdon, and for equal timestamps, puts version 0 rows last.
+ """
+
+ def __init__(self, enabled: bool):
+ # Stores all version 0 aspects for a given createdon timestamp
+ # Once we have emitted all aspects for a given timestamp, we can emit the version 0 aspects
+ # Guaranteeing that, for a given timestamp, we always ingest version 0 aspects last
+ self.queue: Optional[Tuple[datetime, List[ROW]]] = None
+ self.enabled = enabled
+
+ def __call__(self, rows: Iterable[ROW]) -> Iterable[ROW]:
+ for row in rows:
+ yield from self._process_row(row)
+ yield from self._flush_queue()
+
+ def _process_row(self, row: ROW) -> Iterable[ROW]:
+ if not self.enabled:
+ yield row
+ return
+
+ yield from self._attempt_queue_flush(row)
+ if row.version == 0:
+ self._add_to_queue(row)
+ else:
+ yield row
+
+ def _add_to_queue(self, row: ROW) -> None:
+ if self.queue is None:
+ self.queue = (row.createdon, [row])
+ else:
+ self.queue[1].append(row)
+
+ def _attempt_queue_flush(self, row: ROW) -> Iterable[ROW]:
+ if self.queue is None:
+ return
+
+ if row.createdon > self.queue[0]:
+ yield from self._flush_queue()
+
+ def _flush_queue(self) -> Iterable[ROW]:
+ if self.queue is not None:
+ yield from self.queue[1]
+ self.queue = None
+
+
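A quick demonstration of the ordering contract (the row type here is a stand-in satisfying the protocol; the unit tests added in this patch exercise the same behavior):

```python
from dataclasses import dataclass

from datahub.ingestion.source.datahub.datahub_database_reader import VersionOrderer


@dataclass
class DemoRow:  # stand-in satisfying the VersionOrderable protocol
    createdon: int
    version: int


orderer = VersionOrderer[DemoRow](enabled=True)
out = list(orderer([DemoRow(0, 0), DemoRow(0, 1), DemoRow(1, 0)]))
# The version 0 row at createdon=0 is held until the timestamp advances,
# so it is emitted after the version 1 row with the same timestamp.
assert [(r.createdon, r.version) for r in out] == [(0, 1), (0, 0), (1, 0)]
```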
class DataHubDatabaseReader:
def __init__(
self,
@@ -40,13 +98,14 @@ def query(self) -> str:
# Offset is generally 0, unless we repeat the same createdon twice
# Ensures stable order, chronological per (urn, aspect)
- # Version 0 last, only when createdon is the same. Otherwise relies on createdon order
+ # Relies on createdon order to reflect version order
+ # Ordering of entries with the same createdon is handled by VersionOrderer
return f"""
- SELECT urn, aspect, metadata, systemmetadata, createdon
+ SELECT urn, aspect, metadata, systemmetadata, createdon, version
FROM {self.engine.dialect.identifier_preparer.quote(self.config.database_table_name)}
WHERE createdon >= %(since_createdon)s
{"" if self.config.include_all_versions else "AND version = 0"}
- ORDER BY createdon, urn, aspect, CASE WHEN version = 0 THEN 1 ELSE 0 END, version
+ ORDER BY createdon, urn, aspect, version
LIMIT %(limit)s
OFFSET %(offset)s
"""
@@ -54,6 +113,14 @@ def query(self) -> str:
def get_aspects(
self, from_createdon: datetime, stop_time: datetime
) -> Iterable[Tuple[MetadataChangeProposalWrapper, datetime]]:
+ orderer = VersionOrderer[Row](enabled=self.config.include_all_versions)
+ rows = self._get_rows(from_createdon=from_createdon, stop_time=stop_time)
+ for row in orderer(rows):
+ mcp = self._parse_row(row)
+ if mcp:
+ yield mcp, row.createdon
+
+ def _get_rows(self, from_createdon: datetime, stop_time: datetime) -> Iterable[Row]:
with self.engine.connect() as conn:
ts = from_createdon
offset = 0
@@ -69,34 +136,31 @@ def get_aspects(
return
for i, row in enumerate(rows):
- row_dict = row._asdict()
- mcp = self._parse_row(row_dict)
- if mcp:
- yield mcp, row_dict["createdon"]
+ yield row
- if ts == row_dict["createdon"]:
- offset += i
+ if ts == row.createdon:
+ offset += i + 1
else:
- ts = row_dict["createdon"]
+ ts = row.createdon
offset = 0
- def _parse_row(self, d: Dict) -> Optional[MetadataChangeProposalWrapper]:
+ def _parse_row(self, row: Row) -> Optional[MetadataChangeProposalWrapper]:
try:
- json_aspect = post_json_transform(json.loads(d["metadata"]))
- json_metadata = post_json_transform(json.loads(d["systemmetadata"] or "{}"))
+ json_aspect = post_json_transform(json.loads(row.metadata))
+ json_metadata = post_json_transform(json.loads(row.systemmetadata or "{}"))
system_metadata = SystemMetadataClass.from_obj(json_metadata)
return MetadataChangeProposalWrapper(
- entityUrn=d["urn"],
- aspect=ASPECT_MAP[d["aspect"]].from_obj(json_aspect),
+ entityUrn=row.urn,
+ aspect=ASPECT_MAP[row.aspect].from_obj(json_aspect),
systemMetadata=system_metadata,
changeType=ChangeTypeClass.UPSERT,
)
except Exception as e:
logger.warning(
- f"Failed to parse metadata for {d['urn']}: {e}", exc_info=True
+ f"Failed to parse metadata for {row.urn}: {e}", exc_info=True
)
self.report.num_database_parse_errors += 1
self.report.database_parse_errors.setdefault(
str(e), LossyDict()
- ).setdefault(d["aspect"], LossyList()).append(d["urn"])
+ ).setdefault(row.aspect, LossyList()).append(row.urn)
return None
diff --git a/metadata-ingestion/tests/unit/test_datahub_source.py b/metadata-ingestion/tests/unit/test_datahub_source.py
new file mode 100644
index 0000000000000..adc131362b326
--- /dev/null
+++ b/metadata-ingestion/tests/unit/test_datahub_source.py
@@ -0,0 +1,51 @@
+from dataclasses import dataclass
+
+import pytest
+
+from datahub.ingestion.source.datahub.datahub_database_reader import (
+ VersionOrderable,
+ VersionOrderer,
+)
+
+
+@dataclass
+class MockRow(VersionOrderable):
+ createdon: int
+ version: int
+ urn: str
+
+
+@pytest.fixture
+def rows():
+ return [
+ MockRow(0, 0, "one"),
+ MockRow(0, 1, "one"),
+ MockRow(0, 0, "two"),
+ MockRow(0, 0, "three"),
+ MockRow(0, 1, "three"),
+ MockRow(0, 2, "three"),
+ MockRow(0, 1, "two"),
+ MockRow(0, 4, "three"),
+ MockRow(0, 5, "three"),
+ MockRow(1, 6, "three"),
+ MockRow(1, 0, "four"),
+ MockRow(2, 0, "five"),
+ MockRow(2, 1, "six"),
+ MockRow(2, 0, "six"),
+ MockRow(3, 0, "seven"),
+ MockRow(3, 0, "eight"),
+ ]
+
+
+def test_version_orderer(rows):
+ orderer = VersionOrderer[MockRow](enabled=True)
+ ordered_rows = list(orderer(rows))
+ assert ordered_rows == sorted(
+ ordered_rows, key=lambda x: (x.createdon, x.version == 0)
+ )
+
+
+def test_version_orderer_disabled(rows):
+ orderer = VersionOrderer[MockRow](enabled=False)
+ ordered_rows = list(orderer(rows))
+ assert ordered_rows == rows
From f2ce3ab62cc29bd0d4d4cade2577a50a39fa0f32 Mon Sep 17 00:00:00 2001
From: david-leifker <114954101+david-leifker@users.noreply.github.com>
Date: Mon, 6 Nov 2023 15:19:55 -0600
Subject: [PATCH 05/10] lint(frontend): fix HeaderLinks lint error (#9189)
---
.../src/app/shared/admin/HeaderLinks.tsx | 28 +++++++++----------
1 file changed, 14 insertions(+), 14 deletions(-)
diff --git a/datahub-web-react/src/app/shared/admin/HeaderLinks.tsx b/datahub-web-react/src/app/shared/admin/HeaderLinks.tsx
index 3f46f35889fd1..4a7a4938ea970 100644
--- a/datahub-web-react/src/app/shared/admin/HeaderLinks.tsx
+++ b/datahub-web-react/src/app/shared/admin/HeaderLinks.tsx
@@ -105,20 +105,20 @@ export function HeaderLinks(props: Props) {
View and modify your data dictionary
-
+
}
>
From 34aa08b7f38d733adcfe31ca97131e1ea52b49e6 Mon Sep 17 00:00:00 2001
From: John Joyce
Date: Mon, 6 Nov 2023 16:51:05 -0800
Subject: [PATCH 06/10] refactor(ui): Refactor entity page loading indicators
(#9195)
unrelated smoke test failing.
---
.../src/app/entity/EntityPage.tsx | 4 +-
.../containers/profile/EntityProfile.tsx | 3 --
.../profile/header/EntityHeader.tsx | 46 +++++++++++--------
.../header/EntityHeaderLoadingSection.tsx | 29 ++++++++++++
.../src/app/lineage/LineageExplorer.tsx | 7 +--
.../src/app/lineage/LineageLoadingSection.tsx | 27 +++++++++++
6 files changed, 86 insertions(+), 30 deletions(-)
create mode 100644 datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeaderLoadingSection.tsx
create mode 100644 datahub-web-react/src/app/lineage/LineageLoadingSection.tsx
diff --git a/datahub-web-react/src/app/entity/EntityPage.tsx b/datahub-web-react/src/app/entity/EntityPage.tsx
index 09233dbd89f69..916fa41795412 100644
--- a/datahub-web-react/src/app/entity/EntityPage.tsx
+++ b/datahub-web-react/src/app/entity/EntityPage.tsx
@@ -8,7 +8,6 @@ import { useEntityRegistry } from '../useEntityRegistry';
import analytics, { EventType } from '../analytics';
import { decodeUrn } from './shared/utils';
import { useGetGrantedPrivilegesQuery } from '../../graphql/policy.generated';
-import { Message } from '../shared/Message';
import { UnauthorizedPage } from '../authorization/UnauthorizedPage';
import { ErrorSection } from '../shared/error/ErrorSection';
import { VIEW_ENTITY_PAGE } from './shared/constants';
@@ -34,7 +33,7 @@ export const EntityPage = ({ entityType }: Props) => {
const isLineageSupported = entity.isLineageEnabled();
const isLineageMode = useIsLineageMode();
const authenticatedUserUrn = useUserContext()?.user?.urn;
- const { loading, error, data } = useGetGrantedPrivilegesQuery({
+ const { error, data } = useGetGrantedPrivilegesQuery({
variables: {
input: {
actorUrn: authenticatedUserUrn as string,
@@ -71,7 +70,6 @@ export const EntityPage = ({ entityType }: Props) => {
return (
<>
- {loading && <Message type="loading" content="Loading..." />}
{error && <ErrorSection />}
{data && !canViewEntityPage && <UnauthorizedPage />}
{canViewEntityPage &&
diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx
index 5384eb94429ed..74c127cb05dd9 100644
--- a/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx
+++ b/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx
@@ -4,7 +4,6 @@ import { MutationHookOptions, MutationTuple, QueryHookOptions, QueryResult } fro
import styled from 'styled-components/macro';
import { useHistory } from 'react-router';
import { EntityType, Exact } from '../../../../../types.generated';
-import { Message } from '../../../../shared/Message';
import {
getEntityPath,
getOnboardingStepIdsForEntityType,
@@ -274,7 +273,6 @@ export const EntityProfile = ({
}}
>
<>
- {loading && <Message type="loading" content="Loading..." />}
{(error && <ErrorSection />) ||
(!loading && (
@@ -323,7 +321,6 @@ export const EntityProfile = ({
banner
/>
)}
- {loading && <Message type="loading" content="Loading..." />}
{(error && <ErrorSection />) || (
{isLineageMode ? (
diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx
index 97595a515b34d..69389f5dcf6fc 100644
--- a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx
+++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx
@@ -16,6 +16,7 @@ import ShareButton from '../../../../../shared/share/ShareButton';
import { capitalizeFirstLetterOnly } from '../../../../../shared/textUtil';
import { useUserContext } from '../../../../../context/useUserContext';
import { useEntityRegistry } from '../../../../../useEntityRegistry';
+import EntityHeaderLoadingSection from './EntityHeaderLoadingSection';
const TitleWrapper = styled.div`
display: flex;
@@ -81,7 +82,7 @@ type Props = {
};
export const EntityHeader = ({ headerDropdownItems, headerActionItems, isNameEditable, subHeader }: Props) => {
- const { urn, entityType, entityData } = useEntityData();
+ const { urn, entityType, entityData, loading } = useEntityData();
const refetch = useRefetch();
const me = useUserContext();
const platformName = getPlatformName(entityData);
@@ -99,25 +100,32 @@ export const EntityHeader = ({ headerDropdownItems, headerActionItems, isNameEdi
<>
-
-
-
- {entityData?.deprecation?.deprecated && (
-
- )}
- {entityData?.health && (
- ) || (
+ <>
+
+
+
+ {entityData?.deprecation?.deprecated && (
+
+ )}
+ {entityData?.health && (
+
+ )}
+
+
- )}
-
-
+ >
+ )}
diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeaderLoadingSection.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeaderLoadingSection.tsx
new file mode 100644
index 0000000000000..bbf813804edd4
--- /dev/null
+++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeaderLoadingSection.tsx
@@ -0,0 +1,29 @@
+import * as React from 'react';
+import { Skeleton, Space } from 'antd';
+import styled from 'styled-components';
+import { ANTD_GRAY } from '../../../constants';
+
+const ContextSkeleton = styled(Skeleton.Input)`
+ && {
+ width: 320px;
+ border-radius: 4px;
+ background-color: ${ANTD_GRAY[3]};
+ }
+`;
+
+const NameSkeleton = styled(Skeleton.Input)`
+ && {
+ width: 240px;
+ border-radius: 4px;
+ background-color: ${ANTD_GRAY[3]};
+ }
+`;
+
+export default function EntityHeaderLoadingSection() {
+ return (
+ <Space direction="vertical">
+ <ContextSkeleton active />
+ <NameSkeleton active />
+ </Space>
+ );
+}
diff --git a/datahub-web-react/src/app/lineage/LineageExplorer.tsx b/datahub-web-react/src/app/lineage/LineageExplorer.tsx
index ed0b26bde11ef..f59d1843b8a99 100644
--- a/datahub-web-react/src/app/lineage/LineageExplorer.tsx
+++ b/datahub-web-react/src/app/lineage/LineageExplorer.tsx
@@ -3,7 +3,6 @@ import { useHistory } from 'react-router';
import { Button, Drawer } from 'antd';
import { InfoCircleOutlined } from '@ant-design/icons';
import styled from 'styled-components';
-import { Message } from '../shared/Message';
import { useEntityRegistry } from '../useEntityRegistry';
import CompactContext from '../shared/CompactContext';
import { EntityAndType, EntitySelectParams, FetchedEntities } from './types';
@@ -18,12 +17,10 @@ import { ErrorSection } from '../shared/error/ErrorSection';
import usePrevious from '../shared/usePrevious';
import { useGetLineageTimeParams } from './utils/useGetLineageTimeParams';
import analytics, { EventType } from '../analytics';
+import LineageLoadingSection from './LineageLoadingSection';
const DEFAULT_DISTANCE_FROM_TOP = 106;
-const LoadingMessage = styled(Message)`
- margin-top: 10%;
-`;
const FooterButtonGroup = styled.div`
display: flex;
justify-content: space-between;
@@ -167,7 +164,7 @@ export default function LineageExplorer({ urn, type }: Props) {
return (
<>
{error && <ErrorSection />}
- {loading && <LoadingMessage type="loading" content="Loading..." />}
+ {loading && <LineageLoadingSection />}
{!!data && (