From dbfc8cc177fc4400a312090218892272206f6435 Mon Sep 17 00:00:00 2001
From: open-metadata
Date: Wed, 20 Dec 2023 12:51:50 +0000
Subject: [PATCH] See https://github.com/open-metadata/OpenMetadata/commit/1dc79bfd3cc134f9cfb449f774238cfd07a805f1 from refs/heads/main

---
 content/v1.1.x/connectors/database/athena/yaml.md | 9 +++++----
 content/v1.1.x/connectors/database/azuresql/yaml.md | 9 +++++----
 content/v1.1.x/connectors/database/db2/yaml.md | 9 +++++----
 content/v1.1.x/connectors/database/deltalake/yaml.md | 9 +++++----
 .../v1.1.x/connectors/database/domo-database/yaml.md | 9 +++++----
 content/v1.1.x/connectors/database/dynamodb/yaml.md | 9 +++++----
 content/v1.1.x/connectors/database/hive/yaml.md | 9 +++++----
 content/v1.1.x/connectors/database/impala/yaml.md | 9 +++++----
 content/v1.1.x/connectors/database/index.md | 2 +-
 content/v1.1.x/connectors/database/mariadb/yaml.md | 9 +++++----
 content/v1.1.x/connectors/database/mongodb/yaml.md | 9 +++++----
 content/v1.1.x/connectors/database/mysql/yaml.md | 9 +++++----
 content/v1.1.x/connectors/database/oracle/yaml.md | 9 +++++----
 content/v1.1.x/connectors/database/pinotdb/yaml.md | 9 +++++----
 content/v1.1.x/connectors/database/presto/yaml.md | 9 +++++----
 .../v1.1.x/connectors/database/salesforce/yaml.md | 9 +++++----
 content/v1.1.x/connectors/database/sap-hana/yaml.md | 9 +++++----
 .../v1.1.x/connectors/database/singlestore/yaml.md | 9 +++++----
 content/v1.1.x/connectors/database/trino/yaml.md | 9 +++++----
 content/v1.1.x/connectors/database/vertica/yaml.md | 9 +++++----
 content/v1.1.x/connectors/index.md | 1 +
 .../v1.1.x/connectors/ingestion/deployment/index.md | 4 ++--
 content/v1.1.x/connectors/ingestion/lineage/index.md | 12 ++++++------
 content/v1.1.x/connectors/ml-model/mlflow/yaml.md | 2 +-
 content/v1.1.x/connectors/ml-model/sagemaker/yaml.md | 4 ++--
 content/v1.1.x/connectors/storage/s3/yaml.md | 9 +++++----
 content/v1.1.x/deployment/bare-metal/index.md | 2 +-
 content/v1.1.x/deployment/docker/index.md | 2 +-
 content/v1.1.x/deployment/security/index.md | 2 +-
 .../v1.1.x/deployment/security/okta/bare-metal.md | 2 +-
 content/v1.1.x/deployment/security/okta/docker.md | 2 +-
 .../v1.1.x/quick-start/local-docker-deployment.md | 2 +-
 .../quick-start/local-kubernetes-deployment.md | 2 +-
 content/v1.1.x/quick-start/sandbox.md | 2 +-
 .../connectors/database/unity-catalog/index.md | 2 ++
 .../connectors/database/athena/index.md | 2 +-
 .../connectors/database/azuresql/yaml.md | 9 +++++----
 .../connectors/database/bigquery/index.md | 2 ++
 .../connectors/database/bigquery/yaml.md | 1 +
 .../connectors/database/couchbase/yaml.md | 9 +++++----
 .../connectors/database/databricks/index.md | 1 -
 .../connectors/database/impala/index.md | 6 +++---
 .../connectors/database/presto/yaml.md | 9 +++++----
 .../connectors/database/unity-catalog/index.md | 2 ++
 .../connectors/database/vertica/index.md | 6 +++---
 .../connectors/ingestion/deployment/index.md | 6 +++---
 .../connectors/ingestion/lineage/index.md | 12 ++++++------
 .../connectors/ml-model/mlflow/yaml.md | 2 +-
 .../connectors/ml-model/sagemaker/yaml.md | 2 +-
 .../connectors/pipeline/dagster/index.md | 2 +-
 .../v1.3.x-SNAPSHOT/connectors/storage/s3/yaml.md | 9 +++++----
 content/v1.3.x-SNAPSHOT/deployment/docker/index.md | 4 ++--
 content/v1.3.x-SNAPSHOT/deployment/security/index.md | 2 +-
 .../deployment/security/okta/bare-metal.md | 2 +-
 .../deployment/security/okta/docker.md | 2 +-
 .../v1.3.x-SNAPSHOT/deployment/upgrade/kubernetes.md | 2 +-
 .../admin-guide/how-to-ingest-metadata.md | 2 +-
 content/v1.3.x-SNAPSHOT/how-to-guides/index.md | 2 +-
 .../how-to-guides/user-guide-data-users/tags.md | 2 +-
 .../quick-start/local-docker-deployment.md | 2 +-
 .../quick-start/local-kubernetes-deployment.md | 2 +-
 content/v1.3.x-SNAPSHOT/quick-start/sandbox.md | 2 +-
 62 files changed, 179 insertions(+), 148 deletions(-)

diff --git a/content/v1.1.x/connectors/database/athena/yaml.md b/content/v1.1.x/connectors/database/athena/yaml.md
index a726f4e85..58557f12c 100644
--- a/content/v1.1.x/connectors/database/athena/yaml.md
+++ b/content/v1.1.x/connectors/database/athena/yaml.md
@@ -784,9 +784,10 @@ You can learn more about how to ingest lineage [here](/connectors/ingestion/work
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/athena/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.1.x/connectors/database/azuresql/yaml.md b/content/v1.1.x/connectors/database/azuresql/yaml.md
index 003fe313e..f27df5ab9 100644
--- a/content/v1.1.x/connectors/database/azuresql/yaml.md
+++ b/content/v1.1.x/connectors/database/azuresql/yaml.md
@@ -499,9 +499,10 @@ Note now instead of running `ingest`, we are using the `profile` command to sele
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/azuresql/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.1.x/connectors/database/db2/yaml.md b/content/v1.1.x/connectors/database/db2/yaml.md
index 171dd37c4..e671d2fec 100644
--- a/content/v1.1.x/connectors/database/db2/yaml.md
+++ b/content/v1.1.x/connectors/database/db2/yaml.md
@@ -502,9 +502,10 @@ Note now instead of running `ingest`, we are using the `profile` command to sele
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/db2/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.1.x/connectors/database/deltalake/yaml.md b/content/v1.1.x/connectors/database/deltalake/yaml.md
index 41b81a9f4..89272e2f6 100644
--- a/content/v1.1.x/connectors/database/deltalake/yaml.md
+++ b/content/v1.1.x/connectors/database/deltalake/yaml.md
@@ -282,9 +282,10 @@ you will be able to extract metadata from different sources.
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/deltalake/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.1.x/connectors/database/domo-database/yaml.md b/content/v1.1.x/connectors/database/domo-database/yaml.md
index 06e8c6c6f..8d9f65a4d 100644
--- a/content/v1.1.x/connectors/database/domo-database/yaml.md
+++ b/content/v1.1.x/connectors/database/domo-database/yaml.md
@@ -272,9 +272,10 @@ you will be able to extract metadata from different sources.
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/domo-database/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}
\ No newline at end of file

diff --git a/content/v1.1.x/connectors/database/dynamodb/yaml.md b/content/v1.1.x/connectors/database/dynamodb/yaml.md
index 3e94e1df0..e77ff4f89 100644
--- a/content/v1.1.x/connectors/database/dynamodb/yaml.md
+++ b/content/v1.1.x/connectors/database/dynamodb/yaml.md
@@ -287,9 +287,10 @@ you will be able to extract metadata from different sources.
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/dynamodb/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.1.x/connectors/database/hive/yaml.md b/content/v1.1.x/connectors/database/hive/yaml.md
index b1eb08e3d..eaaeda6f4 100644
--- a/content/v1.1.x/connectors/database/hive/yaml.md
+++ b/content/v1.1.x/connectors/database/hive/yaml.md
@@ -521,9 +521,10 @@ Note now instead of running `ingest`, we are using the `profile` command to sele
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/hive/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.1.x/connectors/database/impala/yaml.md b/content/v1.1.x/connectors/database/impala/yaml.md
index d43b25742..87aaf5c05 100644
--- a/content/v1.1.x/connectors/database/impala/yaml.md
+++ b/content/v1.1.x/connectors/database/impala/yaml.md
@@ -480,9 +480,10 @@ link="/connectors/ingestion/workflows/dbt" /%}
 {% tilesContainer %}

 {% tile
-title="Ingest with Airflow"
-description="Configure the ingestion using Airflow SDK"
-link="/connectors/database/impala/airflow"
-/ %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.1.x/connectors/database/index.md b/content/v1.1.x/connectors/database/index.md
index 0599b0dbe..c56d2695d 100644
--- a/content/v1.1.x/connectors/database/index.md
+++ b/content/v1.1.x/connectors/database/index.md
@@ -27,8 +27,8 @@ This is the supported list of connectors for Database Services:
 - [Postgres](/connectors/database/postgres)
 - [Presto](/connectors/database/presto)
 - [Redshift](/connectors/database/redshift)
-- [Sap Hana](/connectors/database/saphana)
 - [Salesforce](/connectors/database/salesforce)
+- [Sap Hana](/connectors/database/sap-hana)
 - [SingleStore](/connectors/database/singlestore)
 - [Snowflake](/connectors/database/snowflake)
 - [Trino](/connectors/database/trino)

diff --git a/content/v1.1.x/connectors/database/mariadb/yaml.md b/content/v1.1.x/connectors/database/mariadb/yaml.md
index ebd4fdb3a..5ebe37512 100644
--- a/content/v1.1.x/connectors/database/mariadb/yaml.md
+++ b/content/v1.1.x/connectors/database/mariadb/yaml.md
@@ -482,9 +482,10 @@ Note now instead of running `ingest`, we are using the `profile` command to sele
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/mariadb/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.1.x/connectors/database/mongodb/yaml.md b/content/v1.1.x/connectors/database/mongodb/yaml.md
index db882e0bc..5df06d728 100644
--- a/content/v1.1.x/connectors/database/mongodb/yaml.md
+++ b/content/v1.1.x/connectors/database/mongodb/yaml.md
@@ -233,9 +233,10 @@ you will be able to extract metadata from different sources.
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/mongodb/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.1.x/connectors/database/mysql/yaml.md b/content/v1.1.x/connectors/database/mysql/yaml.md
index 479aad389..7ca0ef7a1 100644
--- a/content/v1.1.x/connectors/database/mysql/yaml.md
+++ b/content/v1.1.x/connectors/database/mysql/yaml.md
@@ -605,9 +605,10 @@ source:
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/mysql/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.1.x/connectors/database/oracle/yaml.md b/content/v1.1.x/connectors/database/oracle/yaml.md
index bf3c5c41c..1be404f0d 100644
--- a/content/v1.1.x/connectors/database/oracle/yaml.md
+++ b/content/v1.1.x/connectors/database/oracle/yaml.md
@@ -526,9 +526,10 @@ You can learn more about how to ingest lineage [here](/connectors/ingestion/work
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/oracle/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.1.x/connectors/database/pinotdb/yaml.md b/content/v1.1.x/connectors/database/pinotdb/yaml.md
index ea61a8fbf..51dffe7d7 100644
--- a/content/v1.1.x/connectors/database/pinotdb/yaml.md
+++ b/content/v1.1.x/connectors/database/pinotdb/yaml.md
@@ -524,9 +524,10 @@ source:
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/pinotdb/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.1.x/connectors/database/presto/yaml.md b/content/v1.1.x/connectors/database/presto/yaml.md
index e9d5de3ae..2e7120ee6 100644
--- a/content/v1.1.x/connectors/database/presto/yaml.md
+++ b/content/v1.1.x/connectors/database/presto/yaml.md
@@ -484,9 +484,10 @@ Note now instead of running `ingest`, we are using the `profile` command to sele
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/presto/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.1.x/connectors/database/salesforce/yaml.md b/content/v1.1.x/connectors/database/salesforce/yaml.md
index 221a06fc1..9e7b74458 100644
--- a/content/v1.1.x/connectors/database/salesforce/yaml.md
+++ b/content/v1.1.x/connectors/database/salesforce/yaml.md
@@ -257,9 +257,10 @@ you will be able to extract metadata from different sources.
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/salesforce/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.1.x/connectors/database/sap-hana/yaml.md b/content/v1.1.x/connectors/database/sap-hana/yaml.md
index a0b43b3c2..cb642ae68 100644
--- a/content/v1.1.x/connectors/database/sap-hana/yaml.md
+++ b/content/v1.1.x/connectors/database/sap-hana/yaml.md
@@ -513,9 +513,10 @@ Note how instead of running `ingest`, we are using the `profile` command to sele
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/sap-hana/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.1.x/connectors/database/singlestore/yaml.md b/content/v1.1.x/connectors/database/singlestore/yaml.md
index 05e88b398..786d40e05 100644
--- a/content/v1.1.x/connectors/database/singlestore/yaml.md
+++ b/content/v1.1.x/connectors/database/singlestore/yaml.md
@@ -476,9 +476,10 @@ Note now instead of running `ingest`, we are using the `profile` command to sele
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/singlestore/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.1.x/connectors/database/trino/yaml.md b/content/v1.1.x/connectors/database/trino/yaml.md
index 0f5719233..d18b1275e 100644
--- a/content/v1.1.x/connectors/database/trino/yaml.md
+++ b/content/v1.1.x/connectors/database/trino/yaml.md
@@ -559,9 +559,10 @@ source:
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/trino/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}
\ No newline at end of file

diff --git a/content/v1.1.x/connectors/database/vertica/yaml.md b/content/v1.1.x/connectors/database/vertica/yaml.md
index d20227004..caf8c8a09 100644
--- a/content/v1.1.x/connectors/database/vertica/yaml.md
+++ b/content/v1.1.x/connectors/database/vertica/yaml.md
@@ -521,9 +521,10 @@ Note now instead of running `ingest`, we are using the `profile` command to sele
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/vertica/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.1.x/connectors/index.md b/content/v1.1.x/connectors/index.md
index ac1493be3..b993e2bb0 100644
--- a/content/v1.1.x/connectors/index.md
+++ b/content/v1.1.x/connectors/index.md
@@ -52,6 +52,7 @@ the following docs to run the Ingestion Framework in any orchestrator externally
 - [PinotDB](/connectors/database/pinotdb)
 - [Redshift](/connectors/database/redshift)
 - [Salesforce](/connectors/database/salesforce)
+- [Sap Hana](/connectors/database/sap-hana)
 - [SingleStore](/connectors/database/singlestore)
 - [Snowflake](/connectors/database/snowflake)
 - [SQLite](/connectors/database/sqlite)

diff --git a/content/v1.1.x/connectors/ingestion/deployment/index.md b/content/v1.1.x/connectors/ingestion/deployment/index.md
index cb1bf3436..7482639da 100644
--- a/content/v1.1.x/connectors/ingestion/deployment/index.md
+++ b/content/v1.1.x/connectors/ingestion/deployment/index.md
@@ -62,8 +62,8 @@ While the endpoints are directly defined in the `IngestionPipelineResource`, the
 that decouples how OpenMetadata communicates with the Orchestrator, as different external systems will need
 different calls and data to be sent.

-- You can find the `PipelineServiceClient` abstraction [here](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-service/src/main/java/org/openmetadata/service/util/PipelineServiceClient.java),
-- And the `AirflowRESTClient` implementation [here](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-service/src/main/java/org/openmetadata/service/airflow/AirflowRESTClient.java).
+- You can find the `PipelineServiceClient` abstraction [here](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-spec/src/main/java/org/openmetadata/sdk/PipelineServiceClient.java),
+- And the `AirflowRESTClient` implementation [here](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-service/src/main/java/org/openmetadata/service/clients/pipeline/airflow/AirflowRESTClient.java).

 The clients that implement the abstractions from the `PipelineServiceClient` are merely a translation layer
 between the information received in the shape of an `IngestionPipeline` Entity, and the specific requirements of each Orchestrator.

diff --git a/content/v1.1.x/connectors/ingestion/lineage/index.md b/content/v1.1.x/connectors/ingestion/lineage/index.md
index ce734c004..87deeb3d0 100644
--- a/content/v1.1.x/connectors/ingestion/lineage/index.md
+++ b/content/v1.1.x/connectors/ingestion/lineage/index.md
@@ -119,12 +119,12 @@
 You might also need to validate if the query logs are available in the
 You can check the queries being used here:

-- [BigQuery](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/utils/sql_queries.py#L428)
-- [Snowflake](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/utils/sql_queries.py#L197)
-- [MSSQL](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/utils/sql_queries.py#L350)
-- [Redshift](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/utils/sql_queries.py#L18)
-- [Clickhouse](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/utils/sql_queries.py#L376)
-- [Postgres](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/utils/sql_queries.py#L467)
+- [BigQuery](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/ingestion/source/database/bigquery/queries.py)
+- [Snowflake](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/ingestion/source/database/snowflake/queries.py)
+- [MSSQL](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/ingestion/source/database/mssql/queries.py)
+- [Redshift](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/ingestion/source/database/redshift/queries.py)
+- [Clickhouse](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/ingestion/source/database/clickhouse/queries.py)
+- [Postgres](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/ingestion/source/database/postgres/queries.py)

 By default, we apply a result limit of 1000 records. You might also need to increase that for databases with big volumes of queries.

diff --git a/content/v1.1.x/connectors/ml-model/mlflow/yaml.md b/content/v1.1.x/connectors/ml-model/mlflow/yaml.md
index 5b66c8eb8..3445aef81 100644
--- a/content/v1.1.x/connectors/ml-model/mlflow/yaml.md
+++ b/content/v1.1.x/connectors/ml-model/mlflow/yaml.md
@@ -41,7 +41,7 @@ the steps to create a YAML configuration able to connect to the source, process
 the Entities if needed, and reach the OpenMetadata server.

 The workflow is modeled around the following
-[JSON Schema](https://github.com/open-metadata/OpenMetadatablob/main/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/workflow.json)
+[JSON Schema](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/mlmodelServiceMetadataPipeline.json)

 ### 1. Define the YAML Config

diff --git a/content/v1.1.x/connectors/ml-model/sagemaker/yaml.md b/content/v1.1.x/connectors/ml-model/sagemaker/yaml.md
index ed2ce1bfe..21d31c25f 100644
--- a/content/v1.1.x/connectors/ml-model/sagemaker/yaml.md
+++ b/content/v1.1.x/connectors/ml-model/sagemaker/yaml.md
@@ -56,7 +56,7 @@ pip3 install "openmetadata-ingestion[sagemaker]"
 ## Metadata Ingestion

 All connectors are defined as JSON Schemas.
-[Here](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/mlmodel/sagemakerConnection.json)
+[Here](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/mlmodel/sageMakerConnection.json)
 you can find the structure to create a connection to Sagemaker.
 In order to create and run a Metadata Ingestion workflow, we will follow
@@ -64,7 +64,7 @@ the steps to create a YAML configuration able to connect to the source, process
 the Entities if needed, and reach the OpenMetadata server.

 The workflow is modeled around the following
-[JSON Schema](https://github.com/open-metadata/OpenMetadatablob/main/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/workflow.json)
+[JSON Schema](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/mlmodelServiceMetadataPipeline.json)

 ### 1. Define the YAML Config

diff --git a/content/v1.1.x/connectors/storage/s3/yaml.md b/content/v1.1.x/connectors/storage/s3/yaml.md
index c2a019b35..1f0fa10cf 100644
--- a/content/v1.1.x/connectors/storage/s3/yaml.md
+++ b/content/v1.1.x/connectors/storage/s3/yaml.md
@@ -338,9 +338,10 @@ you will be able to extract metadata from different sources.
 {% tilesContainer %}

 {% tile
-title="Ingest with Airflow"
-description="Configure the ingestion using Airflow SDK"
-link="/connectors/database/athena/airflow"
-/ %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.1.x/deployment/bare-metal/index.md b/content/v1.1.x/deployment/bare-metal/index.md
index a48dac718..7dae8c7fd 100644
--- a/content/v1.1.x/deployment/bare-metal/index.md
+++ b/content/v1.1.x/deployment/bare-metal/index.md
@@ -237,7 +237,7 @@ installation.

 ## Next Steps

-1. Visit the [Features](/releases/features) overview page and explore the OpenMetadata UI.
+1. Refer the [How-to Guides](/how-to-guides) for an overview of all the features in OpenMetadata.
 2. Visit the [Connectors](/connectors) documentation to see what services you can integrate with OpenMetadata.
 3. Visit the [API](/swagger.html) documentation and explore the rich set of OpenMetadata APIs.
\ No newline at end of file

diff --git a/content/v1.1.x/deployment/docker/index.md b/content/v1.1.x/deployment/docker/index.md
index bfd12a7ee..41784749f 100644
--- a/content/v1.1.x/deployment/docker/index.md
+++ b/content/v1.1.x/deployment/docker/index.md
@@ -367,7 +367,7 @@ installation.

 ## Next Steps

-1. Visit the [Features](/releases/features) overview page and explore the OpenMetadata UI.
+1. Refer the [How-to Guides](/how-to-guides) for an overview of all the features in OpenMetadata.
 2. Visit the [Connectors](/connectors) documentation to see what services you can integrate with OpenMetadata.
 3. Visit the [API](/swagger.html) documentation and explore the rich set of OpenMetadata APIs.

diff --git a/content/v1.1.x/deployment/security/index.md b/content/v1.1.x/deployment/security/index.md
index 2262c9469..773a1dfab 100644
--- a/content/v1.1.x/deployment/security/index.md
+++ b/content/v1.1.x/deployment/security/index.md
@@ -11,7 +11,7 @@ OpenMetadata has support for Google SSO, Okta SSO, custom OIDC, Auth0, Azure SSO
 Enabling Security is only required for your **Production** installation. If you are testing OpenMetadata, it will
 be easier and faster to set up without security. To get up and running quickly with OpenMetadata (without security),
-please follow the [Quickstart](/quick-start/local-deployment) guide.
+please follow the [Quickstart](/quick-start) guide.
 {%inlineCalloutContainer%}
 {%inlineCallout

diff --git a/content/v1.1.x/deployment/security/okta/bare-metal.md b/content/v1.1.x/deployment/security/okta/bare-metal.md
index a9792051a..498cbf72b 100644
--- a/content/v1.1.x/deployment/security/okta/bare-metal.md
+++ b/content/v1.1.x/deployment/security/okta/bare-metal.md
@@ -66,5 +66,5 @@ airflowConfiguration:
   metadataApiEndpoint: ${SERVER_HOST_API_URL:-http://localhost:8585/api}
 ```

-**Note:** Follow [this](/how-to-guides/feature-configurations/bots) guide to configure the `ingestion-bot` credentials for
+**Note:** Follow [this](/how-to-guides/quick-start-guide-for-admins/bots) guide to configure the `ingestion-bot` credentials for
 ingesting data from Airflow.
\ No newline at end of file

diff --git a/content/v1.1.x/deployment/security/okta/docker.md b/content/v1.1.x/deployment/security/okta/docker.md
index 7a9e524c4..1f0928377 100644
--- a/content/v1.1.x/deployment/security/okta/docker.md
+++ b/content/v1.1.x/deployment/security/okta/docker.md
@@ -74,7 +74,7 @@ AUTHENTICATION_CLIENT_ID={CLIENT_ID - SPA APP} # Update with your Client ID
 AUTHENTICATION_CALLBACK_URL=http://localhost:8585/callback
 ```

-**Note:** Follow [this](/how-to-guides/feature-configurations/bots) guide to configure the `ingestion-bot` credentials for
+**Note:** Follow [this](/how-to-guides/quick-start-guide-for-admins/bots) guide to configure the `ingestion-bot` credentials for
 ingesting data from Airflow.

 ## 2. Start Docker

diff --git a/content/v1.1.x/quick-start/local-docker-deployment.md b/content/v1.1.x/quick-start/local-docker-deployment.md
index 175d67753..69e1959c8 100644
--- a/content/v1.1.x/quick-start/local-docker-deployment.md
+++ b/content/v1.1.x/quick-start/local-docker-deployment.md
@@ -254,7 +254,7 @@ installation.

 ## Next Steps

-1. Visit the [Features](/releases/features) overview page and explore the OpenMetadata UI.
+1. Refer the [How-to Guides](/how-to-guides) for an overview of all the features in OpenMetadata.
 2. Visit the [Connectors](/connectors) documentation to see what services you can integrate with OpenMetadata.
 3. Visit the [API](/swagger.html) documentation and explore the rich set of OpenMetadata APIs.
\ No newline at end of file

diff --git a/content/v1.1.x/quick-start/local-kubernetes-deployment.md b/content/v1.1.x/quick-start/local-kubernetes-deployment.md
index ded99232e..15aa6541a 100644
--- a/content/v1.1.x/quick-start/local-kubernetes-deployment.md
+++ b/content/v1.1.x/quick-start/local-kubernetes-deployment.md
@@ -187,7 +187,7 @@ For more information, visit the kubectl logs command line reference documentatio

 ## Next Steps

-1. Visit the [Features](/releases/features) overview page and explore the OpenMetadata UI.
+1. Refer the [How-to Guides](/how-to-guides) for an overview of all the features in OpenMetadata.
 2. Visit the [Connectors](/connectors) documentation to see what services you can integrate with OpenMetadata.
 3. Visit the [API](/swagger.html) documentation and explore the rich set of OpenMetadata APIs.

diff --git a/content/v1.1.x/quick-start/sandbox.md b/content/v1.1.x/quick-start/sandbox.md
index b32f06e55..456635634 100644
--- a/content/v1.1.x/quick-start/sandbox.md
+++ b/content/v1.1.x/quick-start/sandbox.md
@@ -34,7 +34,7 @@ alt="tour" /%}

 ## Next Steps

-1. Visit the [Features](/releases/features) overview page and explore the OpenMetadata UI.
+1. Refer the [How-to Guides](/how-to-guides) for an overview of all the features in OpenMetadata.
 2. Visit the [Connectors](/connectors) documentation to see what services you can integrate with OpenMetadata.
 3. Visit the [API](/swagger.html) documentation and explore the rich set of OpenMetadata APIs.

diff --git a/content/v1.2.x/connectors/database/unity-catalog/index.md b/content/v1.2.x/connectors/database/unity-catalog/index.md
index fc0eb2fe2..25576a4b9 100644
--- a/content/v1.2.x/connectors/database/unity-catalog/index.md
+++ b/content/v1.2.x/connectors/database/unity-catalog/index.md
@@ -41,6 +41,8 @@ Configure and schedule Unity Catalog metadata workflow from the OpenMetadata UI:

 {% partial file="/v1.2/connectors/external-ingestion-deployment.md" /%}

+## Requirements
+
 {%inlineCallout icon="description" bold="OpenMetadata 0.12 or later" href="/deployment"%}
 To deploy OpenMetadata, check the Deployment guides.
 {%/inlineCallout%}

diff --git a/content/v1.3.x-SNAPSHOT/connectors/database/athena/index.md b/content/v1.3.x-SNAPSHOT/connectors/database/athena/index.md
index dc545b500..7fba3121b 100644
--- a/content/v1.3.x-SNAPSHOT/connectors/database/athena/index.md
+++ b/content/v1.3.x-SNAPSHOT/connectors/database/athena/index.md
@@ -33,7 +33,7 @@ Configure and schedule Athena metadata and profiler workflows from the OpenMetad
 - [Requirements](#requirements)
 - [Metadata Ingestion](#metadata-ingestion)
 - [Service Name](#service-name)
-  - [Connection Options](#connection-options)
+  - [Connection Details](#connection-details)
 - [Metadata Ingestion Options](#metadata-ingestion-options)
 - [Troubleshooting](#troubleshooting)
 - [Workflow Deployment Error](#workflow-deployment-error)

diff --git a/content/v1.3.x-SNAPSHOT/connectors/database/azuresql/yaml.md b/content/v1.3.x-SNAPSHOT/connectors/database/azuresql/yaml.md
index 335e61009..a2cbb83ff 100644
--- a/content/v1.3.x-SNAPSHOT/connectors/database/azuresql/yaml.md
+++ b/content/v1.3.x-SNAPSHOT/connectors/database/azuresql/yaml.md
@@ -207,9 +207,10 @@ source:
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/azuresql/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.3.x-SNAPSHOT/connectors/database/bigquery/index.md b/content/v1.3.x-SNAPSHOT/connectors/database/bigquery/index.md
index 7c130c1fd..a7cfdd39c 100644
--- a/content/v1.3.x-SNAPSHOT/connectors/database/bigquery/index.md
+++ b/content/v1.3.x-SNAPSHOT/connectors/database/bigquery/index.md
@@ -47,6 +47,8 @@ Configure and schedule BigQuery metadata and profiler workflows from the OpenMet
 To deploy OpenMetadata, check the Deployment guides.
 {%/inlineCallout%}

+## Requirements
+
 ### Data Catalog API Permissions

 - Go to [https://console.cloud.google.com/apis/library/datacatalog.googleapis.com](https://console.cloud.google.com/apis/library/datacatalog.googleapis.com)

diff --git a/content/v1.3.x-SNAPSHOT/connectors/database/bigquery/yaml.md b/content/v1.3.x-SNAPSHOT/connectors/database/bigquery/yaml.md
index 945485441..486c6f11d 100644
--- a/content/v1.3.x-SNAPSHOT/connectors/database/bigquery/yaml.md
+++ b/content/v1.3.x-SNAPSHOT/connectors/database/bigquery/yaml.md
@@ -98,6 +98,7 @@ link="/connectors/database/bigquery/roles"
 / %}
 {% /tilesContainer %}

+## Metadata Ingestion

 ### 1. Define the YAML Config

diff --git a/content/v1.3.x-SNAPSHOT/connectors/database/couchbase/yaml.md b/content/v1.3.x-SNAPSHOT/connectors/database/couchbase/yaml.md
index 96182f499..fbc1dd6f4 100644
--- a/content/v1.3.x-SNAPSHOT/connectors/database/couchbase/yaml.md
+++ b/content/v1.3.x-SNAPSHOT/connectors/database/couchbase/yaml.md
@@ -156,9 +156,10 @@ source:
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/mongodb/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.3.x-SNAPSHOT/connectors/database/databricks/index.md b/content/v1.3.x-SNAPSHOT/connectors/database/databricks/index.md
index 6a36249aa..b472f9095 100644
--- a/content/v1.3.x-SNAPSHOT/connectors/database/databricks/index.md
+++ b/content/v1.3.x-SNAPSHOT/connectors/database/databricks/index.md
@@ -30,7 +30,6 @@ In this section, we provide guides and references to use the Databricks connecto
 Configure and schedule Databricks metadata and profiler workflows from the OpenMetadata UI:

-- [Requirements](#requirements)
 - [Unity Catalog](#unity-catalog)
 - [Metadata Ingestion](#metadata-ingestion)
 - [Query Usage](/connectors/ingestion/workflows/usage)

diff --git a/content/v1.3.x-SNAPSHOT/connectors/database/impala/index.md b/content/v1.3.x-SNAPSHOT/connectors/database/impala/index.md
index d678e965d..236c820b4 100644
--- a/content/v1.3.x-SNAPSHOT/connectors/database/impala/index.md
+++ b/content/v1.3.x-SNAPSHOT/connectors/database/impala/index.md
@@ -31,9 +31,9 @@ In this section, we provide guides and references to use the Impala connector.
 Configure and schedule Impala metadata and profiler workflows from the OpenMetadata UI:

 - [Requirements](#requirements)
 - [Metadata Ingestion](#metadata-ingestion)
-- [Data Profiler](#data-profiler)
-- [Data Quality](#data-quality)
-- [dbt Integration](#dbt-integration)
+- [Data Profiler](/connectors/ingestion/workflows/profiler)
+- [Data Quality](/connectors/ingestion/workflows/data-quality)
+- [dbt Integration](/connectors/ingestion/workflows/dbt)

 {% partial file="/v1.3/connectors/ingestion-modes-tiles.md" variables={yamlPath: "/connectors/database/impala/yaml"} /%}

diff --git a/content/v1.3.x-SNAPSHOT/connectors/database/presto/yaml.md b/content/v1.3.x-SNAPSHOT/connectors/database/presto/yaml.md
index b85064b35..0318c1a8f 100644
--- a/content/v1.3.x-SNAPSHOT/connectors/database/presto/yaml.md
+++ b/content/v1.3.x-SNAPSHOT/connectors/database/presto/yaml.md
@@ -195,9 +195,10 @@ source:
 {% tilesContainer %}

 {% tile
- title="Ingest with Airflow"
- description="Configure the ingestion using Airflow SDK"
- link="/connectors/database/presto/airflow"
- / %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.3.x-SNAPSHOT/connectors/database/unity-catalog/index.md b/content/v1.3.x-SNAPSHOT/connectors/database/unity-catalog/index.md
index ef59c25d6..bd6ba5925 100644
--- a/content/v1.3.x-SNAPSHOT/connectors/database/unity-catalog/index.md
+++ b/content/v1.3.x-SNAPSHOT/connectors/database/unity-catalog/index.md
@@ -41,6 +41,8 @@ Configure and schedule Unity Catalog metadata workflow from the OpenMetadata UI:

 {% partial file="/v1.3/connectors/external-ingestion-deployment.md" /%}

+## Requirements
+
 {%inlineCallout icon="description" bold="OpenMetadata 0.12 or later" href="/deployment"%}
 To deploy OpenMetadata, check the Deployment guides.
 {%/inlineCallout%}

diff --git a/content/v1.3.x-SNAPSHOT/connectors/database/vertica/index.md b/content/v1.3.x-SNAPSHOT/connectors/database/vertica/index.md
index 0684adb3d..ed663c16b 100644
--- a/content/v1.3.x-SNAPSHOT/connectors/database/vertica/index.md
+++ b/content/v1.3.x-SNAPSHOT/connectors/database/vertica/index.md
@@ -33,9 +33,9 @@ Configure and schedule Vertica metadata and profiler workflows from the OpenMeta

 - [Requirements](#requirements)
 - [Metadata Ingestion](#metadata-ingestion)
-- [Data Profiler](#data-profiler)
-- [Data Quality](#data-quality)
-- [dbt Integration](#dbt-integration)
+- [Data Profiler](/connectors/ingestion/workflows/profiler)
+- [Data Quality](/connectors/ingestion/workflows/data-quality)
+- [dbt Integration](/connectors/ingestion/workflows/dbt)

 {% partial file="/v1.3/connectors/ingestion-modes-tiles.md" variables={yamlPath: "/connectors/database/vertica/yaml"} /%}

diff --git a/content/v1.3.x-SNAPSHOT/connectors/ingestion/deployment/index.md b/content/v1.3.x-SNAPSHOT/connectors/ingestion/deployment/index.md
index 43deca8a3..42d1db7dd 100644
--- a/content/v1.3.x-SNAPSHOT/connectors/ingestion/deployment/index.md
+++ b/content/v1.3.x-SNAPSHOT/connectors/ingestion/deployment/index.md
@@ -62,8 +62,8 @@ While the endpoints are directly defined in the `IngestionPipelineResource`, the
 that decouples how OpenMetadata communicates with the Orchestrator, as different external systems will need
 different calls and data to be sent.

-- You can find the `PipelineServiceClient` abstraction [here](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-service/src/main/java/org/openmetadata/service/util/PipelineServiceClient.java),
-- And the `AirflowRESTClient` implementation [here](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-service/src/main/java/org/openmetadata/service/airflow/AirflowRESTClient.java).
+- You can find the `PipelineServiceClient` abstraction [here](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-spec/src/main/java/org/openmetadata/sdk/PipelineServiceClient.java),
+- And the `AirflowRESTClient` implementation [here](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-service/src/main/java/org/openmetadata/service/clients/pipeline/airflow/AirflowRESTClient.java).

 The clients that implement the abstractions from the `PipelineServiceClient` are merely a translation layer
 between the information received in the shape of an `IngestionPipeline` Entity, and the specific requirements of each Orchestrator.
@@ -284,7 +284,7 @@ pipelineServiceClient.deployPipeline(ingestionPipeline);
 ```

 Then, the actual deployment logic is handled by the class implementing the Pipeline Service Client. For this example,
-it will be the [AirflowRESTClient](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-service/src/main/java/org/openmetadata/service/airflow/AirflowRESTClient.java).
+it will be the [AirflowRESTClient](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-service/src/main/java/org/openmetadata/service/clients/pipeline/airflow/AirflowRESTClient.java).

 First, let's see what it is needed to instantiate the Airflow REST Client:

diff --git a/content/v1.3.x-SNAPSHOT/connectors/ingestion/lineage/index.md b/content/v1.3.x-SNAPSHOT/connectors/ingestion/lineage/index.md
index 34c9d2a78..6f21e09c6 100644
--- a/content/v1.3.x-SNAPSHOT/connectors/ingestion/lineage/index.md
+++ b/content/v1.3.x-SNAPSHOT/connectors/ingestion/lineage/index.md
@@ -119,12 +119,12 @@ as well).
 You might also need to validate if the query logs are available in the
 You can check the queries being used here:

-- [BigQuery](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/utils/sql_queries.py#L428)
-- [Snowflake](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/utils/sql_queries.py#L197)
-- [MSSQL](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/utils/sql_queries.py#L350)
-- [Redshift](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/utils/sql_queries.py#L18)
-- [Clickhouse](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/utils/sql_queries.py#L376)
-- [Postgres](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/utils/sql_queries.py#L467)
+- [BigQuery](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/ingestion/source/database/bigquery/queries.py)
+- [Snowflake](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/ingestion/source/database/snowflake/queries.py)
+- [MSSQL](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/ingestion/source/database/mssql/queries.py)
+- [Redshift](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/ingestion/source/database/redshift/queries.py)
+- [Clickhouse](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/ingestion/source/database/clickhouse/queries.py)
+- [Postgres](https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/src/metadata/ingestion/source/database/postgres/queries.py)

 By default, we apply a result limit of 1000 records. You might also need to increase that for databases with big volumes of queries.

diff --git a/content/v1.3.x-SNAPSHOT/connectors/ml-model/mlflow/yaml.md b/content/v1.3.x-SNAPSHOT/connectors/ml-model/mlflow/yaml.md
index b5e92546a..245815fdd 100644
--- a/content/v1.3.x-SNAPSHOT/connectors/ml-model/mlflow/yaml.md
+++ b/content/v1.3.x-SNAPSHOT/connectors/ml-model/mlflow/yaml.md
@@ -41,7 +41,7 @@ the steps to create a YAML configuration able to connect to the source, process
 the Entities if needed, and reach the OpenMetadata server.

 The workflow is modeled around the following
-[JSON Schema](https://github.com/open-metadata/OpenMetadatablob/main/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/workflow.json)
+[JSON Schema](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/mlmodelServiceMetadataPipeline.json)

 ### 1. Define the YAML Config

diff --git a/content/v1.3.x-SNAPSHOT/connectors/ml-model/sagemaker/yaml.md b/content/v1.3.x-SNAPSHOT/connectors/ml-model/sagemaker/yaml.md
index 006d19c1b..65a672d72 100644
--- a/content/v1.3.x-SNAPSHOT/connectors/ml-model/sagemaker/yaml.md
+++ b/content/v1.3.x-SNAPSHOT/connectors/ml-model/sagemaker/yaml.md
@@ -64,7 +64,7 @@ the steps to create a YAML configuration able to connect to the source, process
 the Entities if needed, and reach the OpenMetadata server.

 The workflow is modeled around the following
-[JSON Schema](https://github.com/open-metadata/OpenMetadatablob/main/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/workflow.json)
+[JSON Schema](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/mlmodelServiceMetadataPipeline.json)

 ### 1. Define the YAML Config

diff --git a/content/v1.3.x-SNAPSHOT/connectors/pipeline/dagster/index.md b/content/v1.3.x-SNAPSHOT/connectors/pipeline/dagster/index.md
index d660d4aab..0b5b2d065 100644
--- a/content/v1.3.x-SNAPSHOT/connectors/pipeline/dagster/index.md
+++ b/content/v1.3.x-SNAPSHOT/connectors/pipeline/dagster/index.md
@@ -14,7 +14,7 @@ Configure and schedule Dagster metadata and profiler workflows from the OpenMeta
 - [Dagster Versions](#dagster-versions)
 - [Metadata Ingestion](#metadata-ingestion)
 - [Service Name](#service-name)
-  - [Connection Options](#connection-options)
+  - [Connection Details](#connection-details)
 - [Metadata Ingestion Options](#metadata-ingestion-options)
 - [Troubleshooting](#troubleshooting)
 - [Workflow Deployment Error](#workflow-deployment-error)

diff --git a/content/v1.3.x-SNAPSHOT/connectors/storage/s3/yaml.md b/content/v1.3.x-SNAPSHOT/connectors/storage/s3/yaml.md
index ee22c1abe..264c23ecc 100644
--- a/content/v1.3.x-SNAPSHOT/connectors/storage/s3/yaml.md
+++ b/content/v1.3.x-SNAPSHOT/connectors/storage/s3/yaml.md
@@ -299,9 +299,10 @@ source:
 {% tilesContainer %}

 {% tile
-title="Ingest with Airflow"
-description="Configure the ingestion using Airflow SDK"
-link="/connectors/database/athena/airflow"
-/ %}
+ icon="mediation"
+ title="Configure Ingestion Externally"
+ description="Deploy, configure, and manage the ingestion workflows externally."
+ link="/deployment/ingestion"
+ / %}

 {% /tilesContainer %}

diff --git a/content/v1.3.x-SNAPSHOT/deployment/docker/index.md b/content/v1.3.x-SNAPSHOT/deployment/docker/index.md
index 2b52182c3..20428a57c 100644
--- a/content/v1.3.x-SNAPSHOT/deployment/docker/index.md
+++ b/content/v1.3.x-SNAPSHOT/deployment/docker/index.md
@@ -107,7 +107,7 @@ wget https://github.com/open-metadata/OpenMetadata/releases/download/1.2.2-relea

 ### 3. Update Environment Variables required for OpenMetadata Dependencies

-In the previous [step](#2-download-docker-compose-file-from-github-release-branch), we download the `docker-compose` file.
+In the previous [step](#2-download-docker-compose-file-from-github-releases), we download the `docker-compose` file.

 Identify and update the environment variables in the file to prepare openmetadata configurations.
@@ -367,7 +367,7 @@ installation.

 ## Next Steps

-1. Visit the [Features](/releases/features) overview page and explore the OpenMetadata UI.
+1. Refer the [How-to Guides](/how-to-guides) for an overview of all the features in OpenMetadata.
 2. Visit the [Connectors](/connectors) documentation to see what services you can integrate with OpenMetadata.
 3. Visit the [API](/swagger.html) documentation and explore the rich set of OpenMetadata APIs.

diff --git a/content/v1.3.x-SNAPSHOT/deployment/security/index.md b/content/v1.3.x-SNAPSHOT/deployment/security/index.md
index 2262c9469..773a1dfab 100644
--- a/content/v1.3.x-SNAPSHOT/deployment/security/index.md
+++ b/content/v1.3.x-SNAPSHOT/deployment/security/index.md
@@ -11,7 +11,7 @@ OpenMetadata has support for Google SSO, Okta SSO, custom OIDC, Auth0, Azure SSO
 Enabling Security is only required for your **Production** installation. If you are testing OpenMetadata, it will
 be easier and faster to set up without security. To get up and running quickly with OpenMetadata (without security),
-please follow the [Quickstart](/quick-start/local-deployment) guide.
+please follow the [Quickstart](/quick-start) guide.

 {%inlineCalloutContainer%}
 {%inlineCallout

diff --git a/content/v1.3.x-SNAPSHOT/deployment/security/okta/bare-metal.md b/content/v1.3.x-SNAPSHOT/deployment/security/okta/bare-metal.md
index 6b7b00b81..0cf9848f1 100644
--- a/content/v1.3.x-SNAPSHOT/deployment/security/okta/bare-metal.md
+++ b/content/v1.3.x-SNAPSHOT/deployment/security/okta/bare-metal.md
@@ -67,5 +67,5 @@ airflowConfiguration:
   metadataApiEndpoint: ${SERVER_HOST_API_URL:-http://localhost:8585/api}
 ```

-**Note:** Follow [this](/how-to-guides/feature-configurations/bots) guide to configure the `ingestion-bot` credentials for
+**Note:** Follow [this](/developers/bots) guide to configure the `ingestion-bot` credentials for
 ingesting data from Airflow.
\ No newline at end of file

diff --git a/content/v1.3.x-SNAPSHOT/deployment/security/okta/docker.md b/content/v1.3.x-SNAPSHOT/deployment/security/okta/docker.md
index 9878b2fa8..3f057f20a 100644
--- a/content/v1.3.x-SNAPSHOT/deployment/security/okta/docker.md
+++ b/content/v1.3.x-SNAPSHOT/deployment/security/okta/docker.md
@@ -74,7 +74,7 @@ AUTHENTICATION_CLIENT_ID={CLIENT_ID - SPA APP} # Update with your Client ID
 AUTHENTICATION_CALLBACK_URL=http://localhost:8585/callback
 ```

-**Note:** Follow [this](/how-to-guides/feature-configurations/bots) guide to configure the `ingestion-bot` credentials for
+**Note:** Follow [this](/developers/bots) guide to configure the `ingestion-bot` credentials for
 ingesting data from Airflow.

 ## 2. Start Docker

diff --git a/content/v1.3.x-SNAPSHOT/deployment/upgrade/kubernetes.md b/content/v1.3.x-SNAPSHOT/deployment/upgrade/kubernetes.md
index e95756971..31d8302ad 100644
--- a/content/v1.3.x-SNAPSHOT/deployment/upgrade/kubernetes.md
+++ b/content/v1.3.x-SNAPSHOT/deployment/upgrade/kubernetes.md
@@ -108,7 +108,7 @@ openmetadata:
 - global: Additional property airflow is not allowed
 ```

-This means the values passed to the helm charts has a section `global.airflow`. As per the breaking changes mentioned [here](/deployment/upgrade/versions/013-to-100#airflow-configuration-&-pipeline-service-client), Airflow configs are replaced with pipelineServiceClient for Helm Charts.
+This means the values passed to the helm charts has a section `global.airflow`. As per the breaking changes mentioned [here](/deployment/upgrade/versions/100-to-110#pipeline-service-client-configuration), Airflow configs are replaced with pipelineServiceClient for Helm Charts.

 The Helm Chart Values JSON Schema helps to catch the above breaking changes and this section will help you resolve and update your configurations for the same. You can read more about JSON Schema with Helm Charts [here](https://helm.sh/docs/topics/charts/#schema-files).
diff --git a/content/v1.3.x-SNAPSHOT/how-to-guides/admin-guide/how-to-ingest-metadata.md b/content/v1.3.x-SNAPSHOT/how-to-guides/admin-guide/how-to-ingest-metadata.md
index 1d54412af..30c6abbf7 100644
--- a/content/v1.3.x-SNAPSHOT/how-to-guides/admin-guide/how-to-ingest-metadata.md
+++ b/content/v1.3.x-SNAPSHOT/how-to-guides/admin-guide/how-to-ingest-metadata.md
@@ -172,6 +172,6 @@ Admin users can create, edit, or delete services. They can also view the connect
 color="violet-70"
 bold="Delete a Service Connection"
 icon="MdArrowForward"
- href="/how-to-guides/admin-guide/how-to-ingest-metadata/delete-service-connection"%}
+ href="/how-to-guides/admin-guide/delete-service-connection"%}
 Permanently delete a service connection.
 {%/inlineCallout%}
\ No newline at end of file

diff --git a/content/v1.3.x-SNAPSHOT/how-to-guides/index.md b/content/v1.3.x-SNAPSHOT/how-to-guides/index.md
index 05c3d33e5..776a0669c 100644
--- a/content/v1.3.x-SNAPSHOT/how-to-guides/index.md
+++ b/content/v1.3.x-SNAPSHOT/how-to-guides/index.md
@@ -87,7 +87,7 @@ OpenMetadata is a complete package for data teams to break down team silos, shar
 - Enhance organizational **[Data Culture](/how-to-guides/data-insights)** to gain crucial insights to drive innovation.

-- Define your **[Glossary](/how-to-guides/data-governance/glossary-classification)** to build a common understanding of terms within your organization.
+- Define your **[Glossary](/how-to-guides/data-governance/glossary)** to build a common understanding of terms within your organization.

 - Implement **[Data Governance](/how-to-guides/data-governance)** to maintain data integrity, security, and compliance.

diff --git a/content/v1.3.x-SNAPSHOT/how-to-guides/user-guide-data-users/tags.md b/content/v1.3.x-SNAPSHOT/how-to-guides/user-guide-data-users/tags.md
index d59ffeb79..f47eed543 100644
--- a/content/v1.3.x-SNAPSHOT/how-to-guides/user-guide-data-users/tags.md
+++ b/content/v1.3.x-SNAPSHOT/how-to-guides/user-guide-data-users/tags.md
@@ -36,7 +36,7 @@ You can view all the tags in the right panel.
 Data assets can also be classified using Tiers. Learn more about [Tiers](/how-to-guides/data-governance/classification/tiers).

-Among the Classification Tags, OpenMetadata has some System Classification. Learn more about the [System Tags](/how-to-guides/data-governance/classification/classification).
+Among the Classification Tags, OpenMetadata has some System Classification. Learn more about the [System Tags](/how-to-guides/data-governance/classification/overview#classification-in-openmetadata).

 ## Auto-Classification in OpenMetadata

diff --git a/content/v1.3.x-SNAPSHOT/quick-start/local-docker-deployment.md b/content/v1.3.x-SNAPSHOT/quick-start/local-docker-deployment.md
index 373564d5c..4dc36216f 100644
--- a/content/v1.3.x-SNAPSHOT/quick-start/local-docker-deployment.md
+++ b/content/v1.3.x-SNAPSHOT/quick-start/local-docker-deployment.md
@@ -254,7 +254,7 @@ installation.

 ## Next Steps

-1. Visit the [Features](/releases/features) overview page and explore the OpenMetadata UI.
+1. Refer the [How-to Guides](/how-to-guides) for an overview of all the features in OpenMetadata.
 2. Visit the [Connectors](/connectors) documentation to see what services you can integrate with OpenMetadata.
 3. Visit the [API](/swagger.html) documentation and explore the rich set of OpenMetadata APIs.
diff --git a/content/v1.3.x-SNAPSHOT/quick-start/local-kubernetes-deployment.md b/content/v1.3.x-SNAPSHOT/quick-start/local-kubernetes-deployment.md
index ded99232e..15aa6541a 100644
--- a/content/v1.3.x-SNAPSHOT/quick-start/local-kubernetes-deployment.md
+++ b/content/v1.3.x-SNAPSHOT/quick-start/local-kubernetes-deployment.md
@@ -187,7 +187,7 @@ For more information, visit the kubectl logs command line reference documentatio

 ## Next Steps

-1. Visit the [Features](/releases/features) overview page and explore the OpenMetadata UI.
+1. Refer the [How-to Guides](/how-to-guides) for an overview of all the features in OpenMetadata.
 2. Visit the [Connectors](/connectors) documentation to see what services you can integrate with OpenMetadata.
 3. Visit the [API](/swagger.html) documentation and explore the rich set of OpenMetadata APIs.

diff --git a/content/v1.3.x-SNAPSHOT/quick-start/sandbox.md b/content/v1.3.x-SNAPSHOT/quick-start/sandbox.md
index 9e0211da9..a8e5b8868 100644
--- a/content/v1.3.x-SNAPSHOT/quick-start/sandbox.md
+++ b/content/v1.3.x-SNAPSHOT/quick-start/sandbox.md
@@ -34,7 +34,7 @@ alt="tour" /%}

 ## Next Steps

-1. Visit the [Features](/releases/features) overview page and explore the OpenMetadata UI.
+1. Refer the [How-to Guides](/how-to-guides) for an overview of all the features in OpenMetadata.
 2. Visit the [Connectors](/connectors) documentation to see what services you can integrate with OpenMetadata.
 3. Visit the [API](/swagger.html) documentation and explore the rich set of OpenMetadata APIs.