diff --git a/.github/actions/docker-custom-build-and-push/action.yml b/.github/actions/docker-custom-build-and-push/action.yml index ca0796180cd57..3f8ea7a4c88eb 100644 --- a/.github/actions/docker-custom-build-and-push/action.yml +++ b/.github/actions/docker-custom-build-and-push/action.yml @@ -20,7 +20,7 @@ inputs: required: false images: - # e.g. linkedin/datahub-gms + # e.g. acryldata/datahub-gms description: "List of Docker images to use as base name for tags" required: true build-args: diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index 38ffa3484c0bf..5396e6f17cb97 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -17,12 +17,12 @@ concurrency: cancel-in-progress: true env: - DATAHUB_GMS_IMAGE: "linkedin/datahub-gms" - DATAHUB_FRONTEND_IMAGE: "linkedin/datahub-frontend-react" - DATAHUB_MAE_CONSUMER_IMAGE: "linkedin/datahub-mae-consumer" - DATAHUB_MCE_CONSUMER_IMAGE: "linkedin/datahub-mce-consumer" - DATAHUB_KAFKA_SETUP_IMAGE: "linkedin/datahub-kafka-setup" - DATAHUB_ELASTIC_SETUP_IMAGE: "linkedin/datahub-elasticsearch-setup" + DATAHUB_GMS_IMAGE: "acryldata/datahub-gms" + DATAHUB_FRONTEND_IMAGE: "acryldata/datahub-frontend-react" + DATAHUB_MAE_CONSUMER_IMAGE: "acryldata/datahub-mae-consumer" + DATAHUB_MCE_CONSUMER_IMAGE: "acryldata/datahub-mce-consumer" + DATAHUB_KAFKA_SETUP_IMAGE: "acryldata/datahub-kafka-setup" + DATAHUB_ELASTIC_SETUP_IMAGE: "acryldata/datahub-elasticsearch-setup" DATAHUB_MYSQL_SETUP_IMAGE: "acryldata/datahub-mysql-setup" DATAHUB_UPGRADE_IMAGE: "acryldata/datahub-upgrade" DATAHUB_INGESTION_BASE_IMAGE: "acryldata/datahub-ingestion-base" diff --git a/README.md b/README.md index 6b8fa520e432e..dddb32da73f23 100644 --- a/README.md +++ b/README.md @@ -27,7 +27,7 @@ HOSTED_DOCS_ONLY--> [![Version](https://img.shields.io/github/v/release/datahub-project/datahub?include_prereleases)](https://github.com/datahub-project/datahub/releases/latest) [![PyPI version](https://badge.fury.io/py/acryl-datahub.svg)](https://badge.fury.io/py/acryl-datahub) [![build & test](https://github.com/datahub-project/datahub/workflows/build%20&%20test/badge.svg?branch=master&event=push)](https://github.com/datahub-project/datahub/actions?query=workflow%3A%22build+%26+test%22+branch%3Amaster+event%3Apush) -[![Docker Pulls](https://img.shields.io/docker/pulls/linkedin/datahub-gms.svg)](https://hub.docker.com/r/linkedin/datahub-gms) +[![Docker Pulls](https://img.shields.io/docker/pulls/acryldata/datahub-gms.svg)](https://hub.docker.com/r/acryldata/datahub-gms) [![Slack](https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&style=social)](https://slack.datahubproject.io) [![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg)](https://github.com/datahub-project/datahub/blob/master/docs/CONTRIBUTING.md) [![GitHub commit activity](https://img.shields.io/github/commit-activity/m/datahub-project/datahub)](https://github.com/datahub-project/datahub/pulls?q=is%3Apr) diff --git a/build.gradle b/build.gradle index 0d9c0f5dc18b0..5cf43755fceff 100644 --- a/build.gradle +++ b/build.gradle @@ -55,7 +55,7 @@ buildscript { ext.openLineageVersion = '1.5.0' ext.logbackClassicJava8 = '1.2.12' - ext.docker_registry = 'linkedin' + ext.docker_registry = 'acryldata' apply from: './repositories.gradle' buildscript.repositories.addAll(project.repositories) diff --git a/docker/README.md b/docker/README.md index 3510649707c65..ad847dc70cf3c 100644 --- a/docker/README.md +++ b/docker/README.md @@ -26,13 +26,13 
@@ DataHub Docker Images: Do not use `latest` or `debug` tags for any of the images, as those are not supported and are present only for legacy reasons. Please use `head` or version-specific tags such as `v0.8.40`. For production, we recommend version-specific tags rather than `head`. * [acryldata/datahub-ingestion](https://hub.docker.com/r/acryldata/datahub-ingestion/) -* [linkedin/datahub-gms](https://hub.docker.com/repository/docker/linkedin/datahub-gms/) -* [linkedin/datahub-frontend-react](https://hub.docker.com/repository/docker/linkedin/datahub-frontend-react/) -* [linkedin/datahub-mae-consumer](https://hub.docker.com/repository/docker/linkedin/datahub-mae-consumer/) -* [linkedin/datahub-mce-consumer](https://hub.docker.com/repository/docker/linkedin/datahub-mce-consumer/) +* [acryldata/datahub-gms](https://hub.docker.com/repository/docker/acryldata/datahub-gms/) +* [acryldata/datahub-frontend-react](https://hub.docker.com/repository/docker/acryldata/datahub-frontend-react/) +* [acryldata/datahub-mae-consumer](https://hub.docker.com/repository/docker/acryldata/datahub-mae-consumer/) +* [acryldata/datahub-mce-consumer](https://hub.docker.com/repository/docker/acryldata/datahub-mce-consumer/) * [acryldata/datahub-upgrade](https://hub.docker.com/r/acryldata/datahub-upgrade/) -* [linkedin/datahub-kafka-setup](https://hub.docker.com/r/acryldata/datahub-kafka-setup/) -* [linkedin/datahub-elasticsearch-setup](https://hub.docker.com/r/linkedin/datahub-elasticsearch-setup/) +* [acryldata/datahub-kafka-setup](https://hub.docker.com/r/acryldata/datahub-kafka-setup/) +* [acryldata/datahub-elasticsearch-setup](https://hub.docker.com/r/acryldata/datahub-elasticsearch-setup/) * [acryldata/datahub-mysql-setup](https://hub.docker.com/r/acryldata/datahub-mysql-setup/) * [acryldata/datahub-postgres-setup](https://hub.docker.com/r/acryldata/datahub-postgres-setup/) * [acryldata/datahub-actions](https://hub.docker.com/r/acryldata/datahub-actions). Do not use `acryldata/acryl-datahub-actions`, as it is deprecated and no longer used.
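For anyone consuming the renamed images, the compose files touched below keep the same override mechanism: every `image:` line falls back through a `DATAHUB_*_IMAGE` variable and `DATAHUB_VERSION` (default `head`), with `acryldata` now the default repository. A minimal sketch of pinning a release instead of `head`, assuming a standard `docker-compose.override.yml` alongside the stock compose file (the `v0.8.40` tag is just the example version cited above, not part of this change):

```yaml
# Hypothetical docker-compose.override.yml pinning the renamed images to a
# released tag, per the "do not use latest/debug" guidance above.
services:
  datahub-gms:
    image: acryldata/datahub-gms:v0.8.40
  datahub-frontend-react:
    image: acryldata/datahub-frontend-react:v0.8.40
  elasticsearch-setup:
    image: acryldata/datahub-elasticsearch-setup:v0.8.40
```

Setting `DATAHUB_VERSION=v0.8.40` in the environment achieves the same effect without an override file, since the stock compose files read that variable for every tag.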
diff --git a/docker/docker-compose-with-cassandra.yml b/docker/docker-compose-with-cassandra.yml index c99b6e97b4d80..d722b07b9a7af 100644 --- a/docker/docker-compose-with-cassandra.yml +++ b/docker/docker-compose-with-cassandra.yml @@ -8,7 +8,7 @@ version: '3.9' services: datahub-frontend-react: hostname: datahub-frontend-react - image: ${DATAHUB_FRONTEND_IMAGE:-linkedin/datahub-frontend-react}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_FRONTEND_IMAGE:-acryldata/datahub-frontend-react}:${DATAHUB_VERSION:-head} ports: - 9002:9002 build: @@ -32,7 +32,7 @@ services: condition: service_healthy datahub-gms: hostname: datahub-gms - image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_GMS_IMAGE:-acryldata/datahub-gms}:${DATAHUB_VERSION:-head} ports: - 8080:8080 build: @@ -85,7 +85,7 @@ services: # This "container" is a workaround to pre-create search indices elasticsearch-setup: hostname: elasticsearch-setup - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-acryldata/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} build: context: ../ dockerfile: docker/elasticsearch-setup/Dockerfile diff --git a/docker/docker-compose-without-neo4j.yml b/docker/docker-compose-without-neo4j.yml index 0ea61e4be7281..eae36fb849fd5 100644 --- a/docker/docker-compose-without-neo4j.yml +++ b/docker/docker-compose-without-neo4j.yml @@ -8,7 +8,7 @@ version: '3.9' services: datahub-frontend-react: hostname: datahub-frontend-react - image: ${DATAHUB_FRONTEND_IMAGE:-linkedin/datahub-frontend-react}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_FRONTEND_IMAGE:-acryldata/datahub-frontend-react}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 build: @@ -33,7 +33,7 @@ services: condition: service_healthy datahub-gms: hostname: datahub-gms - image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_GMS_IMAGE:-acryldata/datahub-gms}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 build: @@ -73,7 +73,7 @@ services: # This "container" is a workaround to pre-create search indices elasticsearch-setup: hostname: elasticsearch-setup - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-acryldata/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} build: context: ../ dockerfile: docker/elasticsearch-setup/Dockerfile @@ -88,7 +88,7 @@ services: datahub_setup_job: true kafka-setup: hostname: kafka-setup - image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-acryldata/datahub-kafka-setup}:${DATAHUB_VERSION:-head} build: dockerfile: ./docker/kafka-setup/Dockerfile context: ../ diff --git a/docker/docker-compose.consumers-without-neo4j.yml b/docker/docker-compose.consumers-without-neo4j.yml index b1c492c4c7df9..f1aa6b30cede0 100644 --- a/docker/docker-compose.consumers-without-neo4j.yml +++ b/docker/docker-compose.consumers-without-neo4j.yml @@ -7,7 +7,7 @@ services: - MCE_CONSUMER_ENABLED=false datahub-mae-consumer: hostname: datahub-mae-consumer - image: ${DATAHUB_MAE_CONSUMER_IMAGE:-linkedin/datahub-mae-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-acryldata/datahub-mae-consumer}:${DATAHUB_VERSION:-head} ports: - 9091:9091 build: @@ -19,7 +19,7 @@ services: - 
KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} datahub-mce-consumer: hostname: datahub-mce-consumer - image: ${DATAHUB_MCE_CONSUMER_IMAGE:-linkedin/datahub-mce-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-acryldata/datahub-mce-consumer}:${DATAHUB_VERSION:-head} ports: - 9090:9090 build: diff --git a/docker/docker-compose.consumers.dev.yml b/docker/docker-compose.consumers.dev.yml index 5c272a9cf9b8a..00f7b52df151f 100644 --- a/docker/docker-compose.consumers.dev.yml +++ b/docker/docker-compose.consumers.dev.yml @@ -1,7 +1,7 @@ version: '3.9' services: datahub-mae-consumer: - image: linkedin/datahub-mae-consumer:debug + image: acryldata/datahub-mae-consumer:debug build: context: ../ dockerfile: docker/datahub-mae-consumer/Dockerfile @@ -13,7 +13,7 @@ services: - ../metadata-jobs/mae-consumer-job/build/libs/:/datahub/datahub-mae-consumer/bin/ - ./monitoring/client-prometheus-config.yaml:/datahub/datahub-mae-consumer/scripts/prometheus-config.yaml datahub-mce-consumer: - image: linkedin/datahub-mce-consumer:debug + image: acryldata/datahub-mce-consumer:debug build: context: ../ dockerfile: docker/datahub-mce-consumer/Dockerfile diff --git a/docker/docker-compose.consumers.yml b/docker/docker-compose.consumers.yml index 977e29b9a4abc..74b9adaeb9948 100644 --- a/docker/docker-compose.consumers.yml +++ b/docker/docker-compose.consumers.yml @@ -7,7 +7,7 @@ services: - MCE_CONSUMER_ENABLED=false datahub-mae-consumer: hostname: datahub-mae-consumer - image: ${DATAHUB_MAE_CONSUMER_IMAGE:-linkedin/datahub-mae-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-acryldata/datahub-mae-consumer}:${DATAHUB_VERSION:-head} ports: - 9091:9091 build: @@ -22,7 +22,7 @@ services: condition: service_healthy datahub-mce-consumer: hostname: datahub-mce-consumer - image: ${DATAHUB_MCE_CONSUMER_IMAGE:-linkedin/datahub-mce-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-acryldata/datahub-mce-consumer}:${DATAHUB_VERSION:-head} ports: - 9090:9090 build: diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml index c7a3c5098d940..b6ac43a9eda43 100644 --- a/docker/docker-compose.dev.yml +++ b/docker/docker-compose.dev.yml @@ -11,7 +11,7 @@ version: '3.9' services: datahub-frontend-react: - image: linkedin/datahub-frontend-react:head + image: acryldata/datahub-frontend-react:head ports: - ${DATAHUB_MAPPED_FRONTEND_DEBUG_PORT:-5002}:5002 - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 @@ -27,7 +27,7 @@ services: - ../datahub-frontend/build/stage/main:/datahub-frontend - ./monitoring/client-prometheus-config.yaml:/datahub-frontend/client-prometheus-config.yaml datahub-gms: - image: linkedin/datahub-gms:debug + image: acryldata/datahub-gms:debug ports: - ${DATAHUB_MAPPED_GMS_DEBUG_PORT:-5001}:5001 - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 @@ -76,7 +76,7 @@ services: - ${HOME}/.datahub/plugins:/etc/datahub/plugins # Pre-creates the search indices using local mapping/settings.json elasticsearch-setup: - image: linkedin/datahub-elasticsearch-setup:head + image: acryldata/datahub-elasticsearch-setup:head build: context: elasticsearch-setup dockerfile: Dockerfile diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 79cd72a487a37..96f37496859a4 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -8,7 +8,7 @@ version: '3.9' services: datahub-frontend-react: hostname: datahub-frontend-react - image: 
${DATAHUB_FRONTEND_IMAGE:-linkedin/datahub-frontend-react}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_FRONTEND_IMAGE:-acryldata/datahub-frontend-react}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 build: @@ -32,7 +32,7 @@ services: condition: service_healthy datahub-gms: hostname: datahub-gms - image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_GMS_IMAGE:-acryldata/datahub-gms}:${DATAHUB_VERSION:-head} environment: - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} ports: @@ -75,7 +75,7 @@ services: # This "container" is a workaround to pre-create search indices elasticsearch-setup: hostname: elasticsearch-setup - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-acryldata/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} build: context: ../ dockerfile: docker/elasticsearch-setup/Dockerfile @@ -93,7 +93,7 @@ services: # explicitly wait for this container kafka-setup: hostname: kafka-setup - image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-acryldata/datahub-kafka-setup}:${DATAHUB_VERSION:-head} build: dockerfile: ./docker/kafka-setup/Dockerfile context: ../ diff --git a/docker/ingestion/docker-compose.yml b/docker/ingestion/docker-compose.yml index 2ba6872c0313a..06d4e47aa4a40 100644 --- a/docker/ingestion/docker-compose.yml +++ b/docker/ingestion/docker-compose.yml @@ -5,7 +5,7 @@ services: build: context: ../../ dockerfile: docker/datahub-ingestion/Dockerfile - image: linkedin/datahub-ingestion:${DATAHUB_VERSION:-head} + image: acryldata/datahub-ingestion:${DATAHUB_VERSION:-head} hostname: ingestion command: "ingest -c /sample_recipe.yml" volumes: diff --git a/docker/profiles/docker-compose.frontend.yml b/docker/profiles/docker-compose.frontend.yml index 4b2e7417fa61c..345493ba51650 100644 --- a/docker/profiles/docker-compose.frontend.yml +++ b/docker/profiles/docker-compose.frontend.yml @@ -1,7 +1,7 @@ x-datahub-frontend-service: &datahub-frontend-service hostname: datahub-frontend-react - image: ${DATAHUB_FRONTEND_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-frontend-react}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_FRONTEND_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-frontend-react}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 env_file: @@ -15,7 +15,7 @@ x-datahub-frontend-service: &datahub-frontend-service x-datahub-frontend-service-dev: &datahub-frontend-service-dev <<: *datahub-frontend-service - image: ${DATAHUB_FRONTEND_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-frontend-react}:debug + image: ${DATAHUB_FRONTEND_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-frontend-react}:debug ports: - ${DATAHUB_MAPPED_FRONTEND_DEBUG_PORT:-5002}:5002 - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 diff --git a/docker/profiles/docker-compose.gms.yml b/docker/profiles/docker-compose.gms.yml index 961bd4464af95..e9baa65290e50 100644 --- a/docker/profiles/docker-compose.gms.yml +++ b/docker/profiles/docker-compose.gms.yml @@ -90,7 +90,7 @@ x-datahub-system-update-service-dev: &datahub-system-update-service-dev ################################# x-datahub-gms-service: &datahub-gms-service hostname: datahub-gms - image: ${DATAHUB_GMS_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-gms}:${DATAHUB_VERSION:-head} + image: 
${DATAHUB_GMS_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-gms}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 env_file: @@ -112,7 +112,7 @@ x-datahub-gms-service: &datahub-gms-service x-datahub-gms-service-dev: &datahub-gms-service-dev <<: *datahub-gms-service - image: ${DATAHUB_GMS_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-gms}:debug + image: ${DATAHUB_GMS_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-gms}:debug ports: - ${DATAHUB_MAPPED_GMS_DEBUG_PORT:-5001}:5001 - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 @@ -137,7 +137,7 @@ x-datahub-gms-service-dev: &datahub-gms-service-dev ################################# x-datahub-mae-consumer-service: &datahub-mae-consumer-service hostname: datahub-mae-consumer - image: ${DATAHUB_MAE_CONSUMER_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-mae-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-mae-consumer}:${DATAHUB_VERSION:-head} ports: - 9091:9091 env_file: @@ -149,7 +149,7 @@ x-datahub-mae-consumer-service: &datahub-mae-consumer-service x-datahub-mae-consumer-service-dev: &datahub-mae-consumer-service-dev <<: *datahub-mae-consumer-service - image: ${DATAHUB_MAE_CONSUMER_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-mae-consumer}:debug + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-mae-consumer}:debug environment: <<: [*datahub-dev-telemetry-env, *datahub-mae-consumer-env] volumes: @@ -163,7 +163,7 @@ x-datahub-mae-consumer-service-dev: &datahub-mae-consumer-service-dev ################################# x-datahub-mce-consumer-service: &datahub-mce-consumer-service hostname: datahub-mce-consumer - image: ${DATAHUB_MCE_CONSUMER_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-mce-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-mce-consumer}:${DATAHUB_VERSION:-head} ports: - 9090:9090 env_file: @@ -175,7 +175,7 @@ x-datahub-mce-consumer-service: &datahub-mce-consumer-service x-datahub-mce-consumer-service-dev: &datahub-mce-consumer-service-dev <<: *datahub-mce-consumer-service - image: ${DATAHUB_MCE_CONSUMER_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-mce-consumer}:debug + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-mce-consumer}:debug environment: <<: [*datahub-dev-telemetry-env, *datahub-mce-consumer-env] volumes: diff --git a/docker/profiles/docker-compose.prerequisites.yml b/docker/profiles/docker-compose.prerequisites.yml index 7b1f6b8c99c0e..8de220093dda5 100644 --- a/docker/profiles/docker-compose.prerequisites.yml +++ b/docker/profiles/docker-compose.prerequisites.yml @@ -256,7 +256,7 @@ services: kafka-setup: &kafka-setup profiles: *profiles-quickstart hostname: kafka-setup - image: ${DATAHUB_KAFKA_SETUP_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-kafka-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-kafka-setup}:${DATAHUB_VERSION:-head} env_file: kafka-setup/env/docker.env environment: &kafka-setup-env DATAHUB_PRECREATE_TOPICS: ${DATAHUB_PRECREATE_TOPICS:-false} @@ -273,7 +273,7 @@ services: environment: <<: *kafka-setup-env DATAHUB_PRECREATE_TOPICS: ${DATAHUB_PRECREATE_TOPICS:-true} - image: ${DATAHUB_KAFKA_SETUP_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-kafka-setup}:debug + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-kafka-setup}:debug elasticsearch: profiles: *elasticsearch-profiles hostname: search @@ -297,7 +297,7 @@ services: volumes: - esdata:/usr/share/elasticsearch/data 
elasticsearch-setup-dev: &elasticsearch-setup-dev - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-elasticsearch-setup}:debug + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-elasticsearch-setup}:debug profiles: *elasticsearch-profiles hostname: elasticsearch-setup env_file: elasticsearch-setup/env/docker.env @@ -335,7 +335,7 @@ services: <<: *elasticsearch-setup-dev profiles: *opensearch-profiles-quickstart hostname: opensearch-setup - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} environment: <<: *search-datastore-environment USE_AWS_ELASTICSEARCH: ${USE_AWS_ELASTICSEARCH:-true} @@ -348,7 +348,7 @@ services: <<: *opensearch-setup profiles: *opensearch-profiles-dev hostname: opensearch-setup-dev - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-elasticsearch-setup}:debug + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-elasticsearch-setup}:debug environment: <<: *search-datastore-environment USE_AWS_ELASTICSEARCH: ${USE_AWS_ELASTICSEARCH:-true} diff --git a/docker/quickstart/docker-compose-m1.quickstart.yml b/docker/quickstart/docker-compose-m1.quickstart.yml index 50f0c90c83175..d2ac2f151fcbb 100644 --- a/docker/quickstart/docker-compose-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-m1.quickstart.yml @@ -64,7 +64,7 @@ services: - ELASTIC_CLIENT_HOST=elasticsearch - ELASTIC_CLIENT_PORT=9200 hostname: datahub-frontend-react - image: ${DATAHUB_FRONTEND_IMAGE:-linkedin/datahub-frontend-react}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_FRONTEND_IMAGE:-acryldata/datahub-frontend-react}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 volumes: @@ -111,7 +111,7 @@ services: test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health timeout: 5s hostname: datahub-gms - image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_GMS_IMAGE:-acryldata/datahub-gms}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 volumes: @@ -185,7 +185,7 @@ services: - ELASTICSEARCH_PORT=9200 - ELASTICSEARCH_PROTOCOL=http hostname: elasticsearch-setup - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-acryldata/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} labels: datahub_setup_job: true kafka-setup: @@ -200,7 +200,7 @@ services: - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181 - USE_CONFLUENT_SCHEMA_REGISTRY=TRUE hostname: kafka-setup - image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-acryldata/datahub-kafka-setup}:${DATAHUB_VERSION:-head} labels: datahub_setup_job: true mysql: diff --git a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml index 9608b4383ab5d..1ba467d7fb928 100644 --- a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml @@ -64,7 +64,7 @@ services: - ELASTIC_CLIENT_HOST=elasticsearch - ELASTIC_CLIENT_PORT=9200 hostname: datahub-frontend-react - image: ${DATAHUB_FRONTEND_IMAGE:-linkedin/datahub-frontend-react}:${DATAHUB_VERSION:-head} + image: 
${DATAHUB_FRONTEND_IMAGE:-acryldata/datahub-frontend-react}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 volumes: @@ -106,7 +106,7 @@ services: test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health timeout: 5s hostname: datahub-gms - image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_GMS_IMAGE:-acryldata/datahub-gms}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 volumes: @@ -178,7 +178,7 @@ services: - ELASTICSEARCH_PORT=9200 - ELASTICSEARCH_PROTOCOL=http hostname: elasticsearch-setup - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-acryldata/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} labels: datahub_setup_job: true kafka-setup: @@ -193,7 +193,7 @@ services: - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181 - USE_CONFLUENT_SCHEMA_REGISTRY=TRUE hostname: kafka-setup - image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-acryldata/datahub-kafka-setup}:${DATAHUB_VERSION:-head} labels: datahub_setup_job: true mysql: diff --git a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml index 176e5539aa491..893af253095bf 100644 --- a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml @@ -64,7 +64,7 @@ services: - ELASTIC_CLIENT_HOST=elasticsearch - ELASTIC_CLIENT_PORT=9200 hostname: datahub-frontend-react - image: ${DATAHUB_FRONTEND_IMAGE:-linkedin/datahub-frontend-react}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_FRONTEND_IMAGE:-acryldata/datahub-frontend-react}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 volumes: @@ -106,7 +106,7 @@ services: test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health timeout: 5s hostname: datahub-gms - image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_GMS_IMAGE:-acryldata/datahub-gms}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 volumes: @@ -178,7 +178,7 @@ services: - ELASTICSEARCH_PORT=9200 - ELASTICSEARCH_PROTOCOL=http hostname: elasticsearch-setup - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-acryldata/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} labels: datahub_setup_job: true kafka-setup: @@ -193,7 +193,7 @@ services: - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181 - USE_CONFLUENT_SCHEMA_REGISTRY=TRUE hostname: kafka-setup - image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-acryldata/datahub-kafka-setup}:${DATAHUB_VERSION:-head} labels: datahub_setup_job: true mysql: diff --git a/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml index f61bb53d72ecc..a4211acedcf10 100644 --- a/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml @@ -20,7 +20,7 @@ services: - GRAPH_SERVICE_IMPL=elasticsearch - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-mae-consumer/resources/entity-registry.yml hostname: datahub-mae-consumer - image: 
${DATAHUB_MAE_CONSUMER_IMAGE:-linkedin/datahub-mae-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-acryldata/datahub-mae-consumer}:${DATAHUB_VERSION:-head} ports: - 9091:9091 datahub-mce-consumer: @@ -52,7 +52,7 @@ services: - PE_CONSUMER_ENABLED=false - UI_INGESTION_ENABLED=false hostname: datahub-mce-consumer - image: ${DATAHUB_MCE_CONSUMER_IMAGE:-linkedin/datahub-mce-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-acryldata/datahub-mce-consumer}:${DATAHUB_VERSION:-head} ports: - 9090:9090 version: '3.9' diff --git a/docker/quickstart/docker-compose.consumers.quickstart.yml b/docker/quickstart/docker-compose.consumers.quickstart.yml index 3ceb5d537ffd8..e7571e4baf8b4 100644 --- a/docker/quickstart/docker-compose.consumers.quickstart.yml +++ b/docker/quickstart/docker-compose.consumers.quickstart.yml @@ -27,7 +27,7 @@ services: - GRAPH_SERVICE_IMPL=neo4j - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-mae-consumer/resources/entity-registry.yml hostname: datahub-mae-consumer - image: ${DATAHUB_MAE_CONSUMER_IMAGE:-linkedin/datahub-mae-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-acryldata/datahub-mae-consumer}:${DATAHUB_VERSION:-head} ports: - 9091:9091 datahub-mce-consumer: @@ -66,7 +66,7 @@ services: - PE_CONSUMER_ENABLED=false - UI_INGESTION_ENABLED=false hostname: datahub-mce-consumer - image: ${DATAHUB_MCE_CONSUMER_IMAGE:-linkedin/datahub-mce-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-acryldata/datahub-mce-consumer}:${DATAHUB_VERSION:-head} ports: - 9090:9090 version: '3.9' diff --git a/docker/quickstart/docker-compose.quickstart.yml b/docker/quickstart/docker-compose.quickstart.yml index e39695f52a437..f3490ce502626 100644 --- a/docker/quickstart/docker-compose.quickstart.yml +++ b/docker/quickstart/docker-compose.quickstart.yml @@ -64,7 +64,7 @@ services: - ELASTIC_CLIENT_HOST=elasticsearch - ELASTIC_CLIENT_PORT=9200 hostname: datahub-frontend-react - image: ${DATAHUB_FRONTEND_IMAGE:-linkedin/datahub-frontend-react}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_FRONTEND_IMAGE:-acryldata/datahub-frontend-react}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 volumes: @@ -111,7 +111,7 @@ services: test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health timeout: 5s hostname: datahub-gms - image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_GMS_IMAGE:-acryldata/datahub-gms}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 volumes: @@ -185,7 +185,7 @@ services: - ELASTICSEARCH_PORT=9200 - ELASTICSEARCH_PROTOCOL=http hostname: elasticsearch-setup - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-acryldata/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} labels: datahub_setup_job: true kafka-setup: @@ -200,7 +200,7 @@ services: - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181 - USE_CONFLUENT_SCHEMA_REGISTRY=TRUE hostname: kafka-setup - image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-acryldata/datahub-kafka-setup}:${DATAHUB_VERSION:-head} labels: datahub_setup_job: true mysql: diff --git a/docs/authentication/guides/add-users.md b/docs/authentication/guides/add-users.md index d380cacd6665e..86dac3ea328e5 100644 --- a/docs/authentication/guides/add-users.md +++ b/docs/authentication/guides/add-users.md @@ 
-134,7 +134,7 @@ For example, to mount a user.props file that is stored on my local filesystem at build: context: ../ dockerfile: docker/datahub-frontend/Dockerfile - image: linkedin/datahub-frontend-react:${DATAHUB_VERSION:-head} + image: acryldata/datahub-frontend-react:${DATAHUB_VERSION:-head} ..... # The new stuff volumes: diff --git a/docs/authentication/guides/jaas.md b/docs/authentication/guides/jaas.md index 6268d608f4926..42a87a781bd00 100644 --- a/docs/authentication/guides/jaas.md +++ b/docs/authentication/guides/jaas.md @@ -29,7 +29,7 @@ datahub-frontend-react: build: context: ../ dockerfile: docker/datahub-frontend/Dockerfile - image: linkedin/datahub-frontend-react:${DATAHUB_VERSION:-head} + image: acryldata/datahub-frontend-react:${DATAHUB_VERSION:-head} env_file: datahub-frontend/env/docker.env hostname: datahub-frontend-react container_name: datahub-frontend-react @@ -56,7 +56,7 @@ datahub-frontend-react: build: context: ../ dockerfile: docker/datahub-frontend/Dockerfile - image: linkedin/datahub-frontend-react:${DATAHUB_VERSION:-head} + image: acryldata/datahub-frontend-react:${DATAHUB_VERSION:-head} env_file: datahub-frontend/env/docker.env hostname: datahub-frontend-react container_name: datahub-frontend-react diff --git a/docs/authentication/guides/sso/configure-oidc-behind-proxy.md b/docs/authentication/guides/sso/configure-oidc-behind-proxy.md index 684bf768f2baf..c00f43228ea1e 100644 --- a/docs/authentication/guides/sso/configure-oidc-behind-proxy.md +++ b/docs/authentication/guides/sso/configure-oidc-behind-proxy.md @@ -34,7 +34,7 @@ To build a custom image for your frontend, with the certificates built-in, you c Example Dockerfile: ```dockerfile -FROM linkedin/datahub-frontend-react: +FROM acryldata/datahub-frontend-react: COPY /truststore-directory /certificates ``` diff --git a/docs/deploy/aws.md b/docs/deploy/aws.md index d060eddd9acc8..355ed41467008 100644 --- a/docs/deploy/aws.md +++ b/docs/deploy/aws.md @@ -137,7 +137,7 @@ file used to deploy datahub). Change datahub-frontend values to the following. datahub-frontend: enabled: true image: - repository: linkedin/datahub-frontend-react + repository: acryldata/datahub-frontend-react tag: "latest" ingress: enabled: true @@ -305,7 +305,7 @@ a different way of creating time based indices. elasticsearchSetupJob: enabled: true image: - repository: linkedin/datahub-elasticsearch-setup + repository: acryldata/datahub-elasticsearch-setup tag: "***" extraEnvs: - name: USE_AWS_ELASTICSEARCH diff --git a/docs/deploy/azure.md b/docs/deploy/azure.md index b940b82827e94..6ddd5fc5ba1d6 100644 --- a/docs/deploy/azure.md +++ b/docs/deploy/azure.md @@ -165,7 +165,7 @@ In order to use the ingress controller to expose frontend pod, we need to update datahub-frontend: enabled: true image: - repository: linkedin/datahub-frontend-react + repository: acryldata/datahub-frontend-react # tag: "v0.10.0 # defaults to .global.datahub.version # Set up ingress to expose react front-end diff --git a/docs/docker/development.md b/docs/docker/development.md index 91a303744a03b..35c708a4ac490 100644 --- a/docs/docker/development.md +++ b/docs/docker/development.md @@ -30,12 +30,12 @@ containers with remote debugging ports enabled. 
Once the `debug` docker images are constructed you'll see images similar to the following: ```shell -linkedin/datahub-frontend-react debug e52fef698025 28 minutes ago 763MB -linkedin/datahub-kafka-setup debug 3375aaa2b12d 55 minutes ago 659MB -linkedin/datahub-gms debug ea2b0a8ea115 56 minutes ago 408MB +acryldata/datahub-frontend-react debug e52fef698025 28 minutes ago 763MB +acryldata/datahub-kafka-setup debug 3375aaa2b12d 55 minutes ago 659MB +acryldata/datahub-gms debug ea2b0a8ea115 56 minutes ago 408MB acryldata/datahub-upgrade debug 322377a7a21d 56 minutes ago 463MB acryldata/datahub-mysql-setup debug 17768edcc3e5 2 hours ago 58.2MB -linkedin/datahub-elasticsearch-setup debug 4d935be7c62c 2 hours ago 26.1MB +acryldata/datahub-elasticsearch-setup debug 4d935be7c62c 2 hours ago 26.1MB ``` At this point it is possible to view the DataHub UI at `http://localhost:9002` as you normally would with quickstart. diff --git a/docs/how/extract-container-logs.md b/docs/how/extract-container-logs.md index b5fbb4c83cc64..d702a0acc9123 100644 --- a/docs/how/extract-container-logs.md +++ b/docs/how/extract-container-logs.md @@ -15,8 +15,8 @@ To do so, you can view all containers that Docker knows about by running the fol ``` johnjoyce@Johns-MBP datahub-fork % docker container ls CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES -6c4a280bc457 linkedin/datahub-frontend-react "datahub-frontend/bi…" 5 days ago Up 46 hours (healthy) 0.0.0.0:9002->9002/tcp datahub-frontend-react -122a2488ab63 linkedin/datahub-gms "/bin/sh -c /datahub…" 5 days ago Up 5 days (healthy) 0.0.0.0:8080->8080/tcp datahub-gms +6c4a280bc457 acryldata/datahub-frontend-react "datahub-frontend/bi…" 5 days ago Up 46 hours (healthy) 0.0.0.0:9002->9002/tcp datahub-frontend-react +122a2488ab63 acryldata/datahub-gms "/bin/sh -c /datahub…" 5 days ago Up 5 days (healthy) 0.0.0.0:8080->8080/tcp datahub-gms 7682dcc64afa confluentinc/cp-schema-registry:5.4.0 "/etc/confluent/dock…" 5 days ago Up 5 days 0.0.0.0:8081->8081/tcp schema-registry 3680fcaef3ed confluentinc/cp-kafka:5.4.0 "/etc/confluent/dock…" 5 days ago Up 5 days 0.0.0.0:9092->9092/tcp, 0.0.0.0:29092->29092/tcp broker 9d6730ddd4c4 neo4j:4.0.6 "/sbin/tini -g -- /d…" 5 days ago Up 5 days 0.0.0.0:7474->7474/tcp, 7473/tcp, 0.0.0.0:7687->7687/tcp neo4j diff --git a/docs/troubleshooting/quickstart.md b/docs/troubleshooting/quickstart.md index 0392ffc426a6c..0dfe11179083c 100644 --- a/docs/troubleshooting/quickstart.md +++ b/docs/troubleshooting/quickstart.md @@ -88,10 +88,10 @@ You can list all Docker containers in your local by running `docker container ls ``` CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES -979830a342ce linkedin/datahub-mce-consumer:latest "bash -c 'while ping…" 10 hours ago Up 10 hours datahub-mce-consumer -3abfc72e205d linkedin/datahub-frontend-react:latest "datahub-frontend…" 10 hours ago Up 10 hours 0.0.0.0:9002->9002/tcp datahub-frontend -50b2308a8efd linkedin/datahub-mae-consumer:latest "bash -c 'while ping…" 10 hours ago Up 10 hours datahub-mae-consumer -4d6b03d77113 linkedin/datahub-gms:latest "bash -c 'dockerize …" 10 hours ago Up 10 hours 0.0.0.0:8080->8080/tcp datahub-gms +979830a342ce acryldata/datahub-mce-consumer:latest "bash -c 'while ping…" 10 hours ago Up 10 hours datahub-mce-consumer +3abfc72e205d acryldata/datahub-frontend-react:latest "datahub-frontend…" 10 hours ago Up 10 hours 0.0.0.0:9002->9002/tcp datahub-frontend +50b2308a8efd acryldata/datahub-mae-consumer:latest "bash -c 'while ping…" 10 hours ago Up 10 hours datahub-mae-consumer 
+4d6b03d77113 acryldata/datahub-gms:latest "bash -c 'dockerize …" 10 hours ago Up 10 hours 0.0.0.0:8080->8080/tcp datahub-gms c267c287a235 landoop/schema-registry-ui:latest "/run.sh" 10 hours ago Up 10 hours 0.0.0.0:8000->8000/tcp schema-registry-ui 4b38899cc29a confluentinc/cp-schema-registry:5.2.1 "/etc/confluent/dock…" 10 hours ago Up 10 hours 0.0.0.0:8081->8081/tcp schema-registry 37c29781a263 confluentinc/cp-kafka:5.2.1 "/etc/confluent/dock…" 10 hours ago Up 10 hours 0.0.0.0:9092->9092/tcp, 0.0.0.0:29092->29092/tcp broker diff --git a/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py b/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py index a37f6ad8d279e..007b7487cb6a4 100644 --- a/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py +++ b/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py @@ -104,7 +104,7 @@ def __post_init__(self) -> None: self.report.gms_version = ( gms_config.get("versions", {}) - .get("linkedin/datahub", {}) + .get("acryldata/datahub", {}) .get("version", "") ) self.report.max_threads = self.config.max_threads diff --git a/metadata-ingestion/src/datahub/telemetry/telemetry.py b/metadata-ingestion/src/datahub/telemetry/telemetry.py index a802125e76b4e..08df9e80ecf29 100644 --- a/metadata-ingestion/src/datahub/telemetry/telemetry.py +++ b/metadata-ingestion/src/datahub/telemetry/telemetry.py @@ -335,7 +335,7 @@ def _server_props(self, server: Optional[DataHubGraph]) -> Dict[str, str]: "serverType", "missing" ), "server_version": server.server_config.get("versions", {}) - .get("linkedin/datahub", {}) + .get("acryldata/datahub", {}) .get("version", "missing"), "server_id": server.server_id or "missing", } diff --git a/metadata-ingestion/src/datahub/upgrade/upgrade.py b/metadata-ingestion/src/datahub/upgrade/upgrade.py index 075bfd29008f6..446f1a05b71a6 100644 --- a/metadata-ingestion/src/datahub/upgrade/upgrade.py +++ b/metadata-ingestion/src/datahub/upgrade/upgrade.py @@ -139,10 +139,12 @@ async def get_server_version_stats( current_server_release_date = None if server_config: server_version_string = ( - server_config.get("versions", {}).get("linkedin/datahub", {}).get("version") + server_config.get("versions", {}) + .get("acryldata/datahub", {}) + .get("version") ) commit_hash = ( - server_config.get("versions", {}).get("linkedin/datahub", {}).get("commit") + server_config.get("versions", {}).get("acryldata/datahub", {}).get("commit") ) server_type = server_config.get("datahub", {}).get("serverType", "unknown") if server_type == "quickstart" and commit_hash: diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MclConsumerConfig.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MclConsumerConfig.java index 686e2a816ffb5..280ca87d1cf04 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MclConsumerConfig.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MclConsumerConfig.java @@ -39,7 +39,7 @@ public MclConsumerConfig(GitVersion gitVersion) throws JsonProcessingException { config.put("noCode", "true"); Map versionConfig = new HashMap<>(); - versionConfig.put("linkedin/datahub", gitVersion.toConfig()); + versionConfig.put("acryldata/datahub", gitVersion.toConfig()); config.put("versions", versionConfig); configJson = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(config); } diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/McpConsumerConfig.java 
b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/McpConsumerConfig.java index b04ecc7761eb6..3db6dfa79516e 100644 --- a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/McpConsumerConfig.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/McpConsumerConfig.java @@ -39,7 +39,7 @@ public McpConsumerConfig(GitVersion gitVersion) throws JsonProcessingException { config.put("noCode", "true"); Map versionConfig = new HashMap<>(); - versionConfig.put("linkedin/datahub", gitVersion.toConfig()); + versionConfig.put("acryldata/datahub", gitVersion.toConfig()); config.put("versions", versionConfig); configJson = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(config); } diff --git a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/Config.java b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/Config.java index cfa4c6425c131..33015c4adbec5 100644 --- a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/Config.java +++ b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/Config.java @@ -97,7 +97,7 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IO GitVersion version = getGitVersion(ctx); Map versionConfig = new HashMap<>(); - versionConfig.put("linkedin/datahub", version.toConfig()); + versionConfig.put("acryldata/datahub", version.toConfig()); config.put("versions", versionConfig); ConfigurationProvider configProvider = getConfigProvider(ctx); diff --git a/smoke-test/tests/read_only/test_services_up.py b/smoke-test/tests/read_only/test_services_up.py index 4e00f910ceb73..1fd43f884323c 100644 --- a/smoke-test/tests/read_only/test_services_up.py +++ b/smoke-test/tests/read_only/test_services_up.py @@ -25,7 +25,7 @@ def test_gms_config_accessible() -> None: assert gms_config is not None if DATAHUB_VERSION is not None: - assert gms_config["versions"]["linkedin/datahub"]["version"] == DATAHUB_VERSION + assert gms_config["versions"]["acryldata/datahub"]["version"] == DATAHUB_VERSION else: print("[WARN] TEST_DATAHUB_VERSION is not set")
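For context on the `versions` rekeying in the Python and Java changes above: the server reports its build info under a single repository-style key in its config payload, and clients read it back with defensive dict lookups. A minimal sketch of that read path under the new key — the payload values (version string, commit hash, server type) are illustrative placeholders, not taken from this change:

```python
# Illustrative config payload shape after the rename; only the
# "acryldata/datahub" key (previously "linkedin/datahub") matters here.
server_config = {
    "versions": {
        "acryldata/datahub": {"version": "v0.13.0", "commit": "abc1234"},  # example values
    },
    "datahub": {"serverType": "quickstart"},
}

# Same defensive lookup style as datahub_rest.py / telemetry.py / upgrade.py above.
server_version = (
    server_config.get("versions", {})
    .get("acryldata/datahub", {})
    .get("version", "missing")
)
commit_hash = (
    server_config.get("versions", {}).get("acryldata/datahub", {}).get("commit")
)
print(server_version, commit_hash)  # -> v0.13.0 abc1234
```

A client still keyed on `linkedin/datahub` would simply fall through to the defaults here, which is why the REST sink, telemetry, upgrade check, consumer configs, and the smoke test all move to `acryldata/datahub` in the same change.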